Skip to content
Snippets Groups Projects

feat(cache): Add default fetch for loading cache to simplify usage

Merged Martin Lowe requested to merge malowe/main/projects-model-update into main
1 unresolved thread
Files
14
@@ -51,197 +51,206 @@ import jakarta.enterprise.context.ApplicationScoped;
@ApplicationScoped
public class DefaultLoadingCacheManager implements LoadingCacheManager {
private final LoadingCacheConfig config;
private final List<InstanceHandle<LoadingCacheProvider<?>>> providers;
private final ManagedExecutor executor;
private final DistributedCacheService dcs;
private final LoadingCacheConfig config;
private final List<InstanceHandle<LoadingCacheProvider<?>>> providers;
private final ManagedExecutor executor;
private final DistributedCacheService dcs;
// internal list of managed loading caches
private List<LoadingCacheWrapper<?>> caches;
// internal list of managed loading caches
private List<LoadingCacheWrapper<?>> caches;
/**
* Default constructor for LCM, includes required configs, providers, and the executor for processing.
*
* @param config loading cache configuration mapping for runtime
* @param providers list of all available loaders for the cache
* @param executor context-aware executor for loading cache async
*/
public DefaultLoadingCacheManager(LoadingCacheConfig config, @All List<InstanceHandle<LoadingCacheProvider<?>>> providers,
DistributedCacheService dcs, ManagedExecutor executor) {
this.config = config;
this.providers = providers;
this.executor = executor;
this.dcs = dcs;
/**
 * Default constructor for LCM, includes required configs, providers, and the executor for processing.
 *
 * @param config loading cache configuration mapping for runtime
 * @param providers list of all available loaders for the cache
 * @param dcs distributed cache service, used for caches configured to be distributed rather than local
 * @param executor context-aware executor for loading cache async
 */
public DefaultLoadingCacheManager(LoadingCacheConfig config, @All List<InstanceHandle<LoadingCacheProvider<?>>> providers,
DistributedCacheService dcs, ManagedExecutor executor) {
this.config = config;
this.providers = providers;
this.executor = executor;
this.dcs = dcs;
}
/**
 * Builds the managed loading caches from the discovered providers, validates that each
 * produced type is served by exactly one cache, and warms any caches configured to load
 * at application boot.
 */
@PostConstruct
void generateCaches() {
    // build one named wrapper per provider; collect is used instead of toList as the
    // wildcard generic cannot be cast properly through toList
    this.caches = providers
            .stream()
            .map(handle -> new LoadingCacheWrapper<>(
                    config.loaders().getOrDefault(handle.getBean().getName(), config.defaults()),
                    handle.get()))
            .collect(Collectors.toList());
    // only one cache may exist per inner type; multiple caches for the same type is ambiguous.
    // We could support multiple caches for the same type eventually, but there is no current use case
    boolean ambiguous = this.caches
            .stream()
            .collect(Collectors.groupingBy(LoadingCacheWrapper::getInnerType))
            .values()
            .stream()
            .anyMatch(group -> group.size() > 1);
    if (ambiguous) {
        throw new IllegalStateException("Ambiguous loading caches found, will not continue to load.");
    }
    // preload keys where configured, discarding the unneeded result after calculation
    this.caches
            .stream()
            .filter(wrapper -> wrapper.config.startAtBoot())
            .forEach(wrapper -> lookupLoadingCache(ParameterizedCacheKeyBuilder
                    .builder()
                    .clazz(wrapper.innerType)
                    .id(wrapper.config.runtimeBootKey().orElse("all"))
                    .build(), wrapper));
}
/**
* Using the loading cache providers, generate active loading caches for the application.
*/
@PostConstruct
void generateCaches() {
// instantiate the caches, using the handles to build named caches, needs to use collect as toList can't cast properly
this.caches = providers
.stream()
.map(p -> new LoadingCacheWrapper<>(config.loaders().getOrDefault(p.getBean().getName(), config.defaults()), p.get()))
.collect(Collectors.toList());
// grouping by inner type, ensure that there is only 1 cache per type.
// We could support multiple caches for the same type eventually, but there is no current use case
if (this.caches
.stream()
.collect(Collectors.groupingBy(LoadingCacheWrapper::getInnerType))
.entrySet()
.stream()
.anyMatch(e -> e.getValue().size() > 1)) {
throw new IllegalStateException("Ambiguous loading caches found, will not continue to load.");
}
// do the preloading keys if present, discarding the unneeded result after calculation
this.caches
.stream()
.filter(c -> c.config.startAtBoot())
.forEach(c -> lookupLoadingCache(
ParameterizedCacheKeyBuilder.builder().clazz(c.innerType).id(c.config.runtimeBootKey().get()).build(), c));
@Override
public <T> List<T> getDefaultList(Class<T> clazz) {
    // locate the single wrapper whose produced type matches the requested class
    LoadingCacheWrapper<?> wrapper = caches
            .stream()
            .filter(c -> c.getInnerType().equals(clazz))
            .findFirst()
            .orElseThrow(() -> new IllegalStateException(
                    "Could not find a provided loading cache mapped for type: " + clazz.getSimpleName()));
    // do the checked lookup using the configured boot key, falling back to "all"
    return lookupLoadingCache(
            new ParameterizedCacheKey(wrapper.getLoaderConfig().runtimeBootKey().orElse("all"), clazz, null),
            wrapper);
}
@Override
public <T> List<T> getList(ParameterizedCacheKey k) {
Optional<LoadingCacheWrapper<?>> cache = caches.stream().filter(c -> c.getInnerType().equals(k.clazz())).findFirst();
if (cache.isEmpty()) {
throw new IllegalStateException("Could not find a provided loading cache mapped for type: " + k.clazz().getSimpleName());
}
// do the checked lookup of the cache data
return lookupLoadingCache(k, cache.get());
@Override
public <T> List<T> getList(ParameterizedCacheKey k) {
    // locate the single wrapper whose produced type matches the key's class
    LoadingCacheWrapper<?> wrapper = caches
            .stream()
            .filter(c -> c.getInnerType().equals(k.clazz()))
            .findFirst()
            .orElseThrow(() -> new IllegalStateException(
                    "Could not find a provided loading cache mapped for type: " + k.clazz().getSimpleName()));
    // do the checked lookup of the cache data
    return lookupLoadingCache(k, wrapper);
}
/**
 * Exposes the loading caches managed by this instance.
 *
 * @return unmodifiable view over the internal list of cache wrappers
 */
@Override
public List<LoadingCacheWrapper<?>> getManagedCaches() {
return Collections.unmodifiableList(caches);
}
@Override
public List<LoadingCacheWrapper<?>> getManagedCaches() {
return Collections.unmodifiableList(caches);
/**
 * Resolves the given key against the supplied cache wrapper, either through the distributed
 * cache service or the wrapped async cache bounded by the configured timeout, rethrowing any
 * execution failures upstream.
 *
 * @param <T> the type of data returned by the cache
 * @param k the cache key to pass to the loading cache for value retrieval
 * @param cache the cache wrapper that can retrieve the value for the current request
 * @return the list of results for the call to the cache
 * @throws RuntimeException when there is a timeout or error in the execution of loading the cache value
 */
@SuppressWarnings("unchecked")
private <T> List<T> lookupLoadingCache(ParameterizedCacheKey k, LoadingCacheWrapper<?> cache) {
    // safe cast: callers match the wrapper on its inner type before handing it to this method
    LoadingCacheWrapper<T> typedWrapper = (LoadingCacheWrapper<T>) cache;
    try {
        if (!typedWrapper.config.useDistributedCache()) {
            // local path: look up via the async cache with the configured timeout
            return typedWrapper.cache.get(k).get(typedWrapper.config.timeout(), TimeUnit.SECONDS);
        }
        // distributed path: an empty result is an error state and processing must not continue
        Optional<List<T>> opt = dcs
                .getList(k.id(), k.params(), typedWrapper.innerType, () -> typedWrapper.loader.apply(k))
                .data();
        return opt.orElseThrow(() -> new RuntimeException("Error while fetching precache results"));
    } catch (TimeoutException | ExecutionException e) {
        // rethrow upstream. In the future, we may add handlers here
        throw new RuntimeException(e);
    } catch (InterruptedException e) {
        // reinterrupt, as we don't care about our state in this case; the empty result is
        // only technically reachable, as interrupts shouldn't resolve
        Thread.currentThread().interrupt();
        return Collections.emptyList();
    }
}
/**
 * Checks if a cache should be refreshed by checking the last refresh time, and if so iterating all active keys and attempt a refresh.
 * This is used for cases where we need to force the cache to refresh before it is called by downstream methods to have the most accurate
 * data possible.
 *
 * Wrappers configured to use the distributed cache have no local Caffeine cache (the field is
 * null — see the wrapper constructor) and are skipped, as their refresh is handled by the
 * distributed cache service.
 */
@Scheduled(every = "5m")
void refreshKeys() {
    caches
            .stream()
            // guard against distributed-cache wrappers, whose local cache is null
            .filter(wrapper -> wrapper.cache != null && wrapper.config.forceRefresh())
            .forEach(wrapper -> wrapper.cache.getAll(wrapper.cache.asMap().keySet()));
}
/**
* Typed generic wrapper for loading caches. This will help match on internal types of expected output and increase type safety around
* lookups, as well as provide best matching configuration values for the given cache.
*
* @author Martin Lowe
*
* @param <T> the internal type of content produced by the loading cache
*/
public final class LoadingCacheWrapper<T> {
private final AsyncLoadingCache<ParameterizedCacheKey, List<T>> cache;
private final Function<ParameterizedCacheKey, List<T>> loader;
private final Class<T> innerType;
private final LoaderDefinition config;
/**
* Using the key and the discovered cache, do a lookup of the key, using the configured timeout and rethrowing any exceptions in
* execution upstream.
* Builds the wrapper with assembled cache, the configurations that apply to the cache to reduce need for reflection, and the internal
* type for easier matching on incoming requests.
*
* @param <T> the type of data returned by the cache
* @param k the cache key to pass to the loading cache for value retrieval
* @param cache the cache wrapper that can retrieve the value for the current request
* @return the list of results for the call to the cache
* @throws RuntimeException when there is a timeout or error in the execution of loading the cache value
* @param cache the wrapped asynchronous cache
* @param innerType the return type of the loading cache
* @param config relevant settings for the current cache
*/
@SuppressWarnings("unchecked")
private <T> List<T> lookupLoadingCache(ParameterizedCacheKey k, LoadingCacheWrapper<?> cache) {
// this is checked through the above inner class comparison, so this should be a safe lookup
LoadingCacheWrapper<T> typedCache = (LoadingCacheWrapper<T>) cache;
try {
// do the lookup of the cached data, using the configured timeout
if (typedCache.config.useDistributedCache()) {
Optional<List<T>> opt = dcs
.getList(k.id(), k.params(), typedCache.innerType, () -> typedCache.loader.apply(k))
.data();
// we don't want to continue processing as this is an error state
if (opt.isEmpty()) {
throw new RuntimeException("Error while fetching precache results");
}
return opt.get();
} else {
return typedCache.cache.get(k).get(typedCache.config.timeout(), TimeUnit.SECONDS);
}
} catch (TimeoutException | ExecutionException e) {
// rethrow upstream. In the future, we may add handlers here
throw new RuntimeException(e);
} catch (InterruptedException e) {
// reinterrupt, as we don't care about our state in this case
Thread.currentThread().interrupt();
}
// this is only technically possible but shouldn't happen, as interrupts shouldn't resolve
return Collections.emptyList();
/**
 * Builds the wrapper with the assembled cache, the configuration that applies to the cache to
 * reduce the need for reflection, and the internal type for easier matching on incoming requests.
 * When the loader is configured to use the distributed cache, no local Caffeine cache is built.
 *
 * @param config relevant settings for the current cache, never null
 * @param provider the loading cache provider supplying the fetch function and produced type
 */
private LoadingCacheWrapper(LoaderDefinition config, LoadingCacheProvider<T> provider) {
// NOTE(review): cache is deliberately null for distributed caches — consumers of getCache()
// and getCacheKeys() must tolerate that
this.cache = config.useDistributedCache() ? null
: Caffeine
.newBuilder()
.executor(executor)
.refreshAfterWrite(config.refreshAfter())
.maximumSize(config.maximumSize())
.buildAsync(provider::fetchData);
this.innerType = Objects.requireNonNull(provider.getType());
this.config = Objects.requireNonNull(config);
this.loader = provider::fetchData;
}
/**
* Checks if a cache should be refreshed by checking the last refresh time, and if so iterating all active keys and attempt a refresh.
* This is used for cases where we need to force the cache to refresh before it is called by downstream methods to have the most
* accurate data possible.
* The loading cache used to maintain preloaded assets.
*
* @return the loading cache for the wrapper.
*/
@Scheduled(every = "5m")
void refreshKeys() {
caches
.stream()
.filter(wrapper -> wrapper.config.forceRefresh())
.forEach(wrapper -> wrapper.cache.getAll(wrapper.cache.asMap().keySet()));
/**
 * The loading cache used to maintain preloaded assets.
 *
 * @return the loading cache for the wrapper, or null when the wrapper is configured to use
 *         the distributed cache and therefore has no local cache
 */
public AsyncLoadingCache<ParameterizedCacheKey, List<T>> getCache() {
return this.cache;
}
/**
* Typed generic wrapper for loading caches. This will help match on internal types of expected output and increase type safety around
* lookups, as well as provide best matching configuration values for the given cache.
* Retrieves a view of the cache keys for the current cache. Will only show keys that have realized values and not values that are in
* the process of being fetched.
*
* @author Martin Lowe
*
* @param <T> the internal type of content produced by the loading cache
* @return list of cache keys for finished cache results.
*/
public final class LoadingCacheWrapper<T> {
private final AsyncLoadingCache<ParameterizedCacheKey, List<T>> cache;
private final Function<ParameterizedCacheKey, List<T>> loader;
private final Class<T> innerType;
private final LoaderDefinition config;
/**
* Builds the wrapper with assembled cache, the configurations that apply to the cache to reduce need for reflection, and the
* internal type for easier matching on incoming requests.
*
* @param cache the wrapped asynchronous cache
* @param innerType the return type of the loading cache
* @param config relevant settings for the current cache
*/
private LoadingCacheWrapper(LoaderDefinition config, LoadingCacheProvider<T> provider) {
this.cache = config.useDistributedCache() ? null
: Caffeine
.newBuilder()
.executor(executor)
.refreshAfterWrite(config.refreshAfter())
.maximumSize(config.maximumSize())
.buildAsync(provider::fetchData);
this.innerType = Objects.requireNonNull(provider.getType());
this.config = Objects.requireNonNull(config);
this.loader = provider::fetchData;
}
/**
* The loading cache used to maintain preloaded assets.
*
* @return the loading cache for the wrapper.
*/
public AsyncLoadingCache<ParameterizedCacheKey, List<T>> getCache() {
return this.cache;
}
/**
 * Retrieves a view of the cache keys for the current cache. Will only show keys that have realized values and not values that are
 * in the process of being fetched.
 *
 * @return list of cache keys for finished cache results; empty when the wrapper is backed by
 *         the distributed cache and has no local cache to inspect.
 */
public List<ParameterizedCacheKey> getCacheKeys() {
    // distributed-cache wrappers have a null local cache (see constructor); previously this NPE'd
    if (this.cache == null) {
        return new ArrayList<>();
    }
    return new ArrayList<>(this.cache.asMap().keySet());
}
public List<ParameterizedCacheKey> getCacheKeys() {
return new ArrayList<>(this.cache.asMap().keySet());
}
/**
 * The internal type of content returned by the loading cache operations. Used to match
 * incoming requests to the single cache that produces the requested type.
 *
 * @return literal class for internal content type.
 */
public Class<T> getInnerType() {
return this.innerType;
}
/**
* The internal type of content returned by the loading cache operations.
*
* @return literal class for internal content type.
*/
public Class<T> getInnerType() {
return this.innerType;
}
/**
 * Gets the pre-loading cache configuration that defines refresh periods, whether to enforce forced refreshes, and the timeout
 * periods to reduce risk of stalled operations.
 *
 * @return the preloading configuration for the wrapped cache, never null.
 */
public LoaderDefinition getLoaderConfig() {
return this.config;
}
/**
* Gets the pre-loading cache configuration that defines refresh periods, whether to enforce forced refreshes, and the timeout periods
* to reduce risk of stalled operations.
*
* @return the preloading configuration for the wrapped cache.
*/
public LoaderDefinition getLoaderConfig() {
return this.config;
}
}
}
Loading