Skip to content
Snippets Groups Projects
Commit f333cda6 authored by Martin Lowe's avatar Martin Lowe :flag_ca:
Browse files

Iss #104 - Add standard cache, precache, and SQL metrics summaries

Using the micrometer extension, metrics have been added for SQL and
caching calls, which capture uncached/raw performance of lookups.

The SQL timer summaries were handled manually rather than through the
annotations, as the augmentation doesn't apply properly when the beans
are synthetically added during deployment.

Resolves #104
parent f164632d
No related branches found
No related tags found
2 merge requests!200Merge 1.0 release candidate code into trunk,!199Iss #104 - Add standard cache, precache, and SQL metrics summaries
Pipeline #43728 passed
Showing
with 131 additions and 56 deletions
package org.eclipsefoundation.caching.config;
import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
import io.micrometer.common.annotation.ValueResolver;
import jakarta.inject.Singleton;
@Singleton
public class CacheKeyClassTagResolver implements ValueResolver {

    /**
     * Resolves a metric tag value from an annotated method argument.
     *
     * @param parameter the method argument being resolved for tagging
     * @return the simple name of the class held by the cache key when the
     *         argument is a {@link ParameterizedCacheKey}, otherwise {@code null}
     */
    @Override
    public String resolve(Object parameter) {
        if (!(parameter instanceof ParameterizedCacheKey)) {
            return null;
        }
        ParameterizedCacheKey key = (ParameterizedCacheKey) parameter;
        return key.getClazz().getSimpleName();
    }
}
...@@ -17,8 +17,8 @@ import org.eclipsefoundation.caching.model.ParameterizedCacheKey; ...@@ -17,8 +17,8 @@ import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
import org.eclipsefoundation.caching.service.impl.DefaultLoadingCacheManager.LoadingCacheWrapper; import org.eclipsefoundation.caching.service.impl.DefaultLoadingCacheManager.LoadingCacheWrapper;
/** /**
* Interface for service that manages loading caches within applications. This will remove the burden of managing best * Interface for service that manages loading caches within applications. This will remove the burden of managing best implementations and
* implementations and improve consistency when accessing these caches. * improve consistency when accessing these caches.
* *
* @author Martin Lowe * @author Martin Lowe
* *
...@@ -30,8 +30,8 @@ public interface LoadingCacheManager { ...@@ -30,8 +30,8 @@ public interface LoadingCacheManager {
* Retrieves a list of cached results for the given key. * Retrieves a list of cached results for the given key.
* *
* <p> * <p>
* If there is no cache value populated, it will be generated. If the value exists but is expired, then the cache will * If there is no cache value populated, it will be generated. If the value exists but is expired, then the cache will return the
* return the current value and begin recalculations in the background. * current value and begin recalculations in the background.
* *
* @param <T> the type of data returned by the given cache * @param <T> the type of data returned by the given cache
* @param k the key to use in cache lookups * @param k the key to use in cache lookups
...@@ -40,8 +40,7 @@ public interface LoadingCacheManager { ...@@ -40,8 +40,7 @@ public interface LoadingCacheManager {
<T> List<T> getList(ParameterizedCacheKey k); <T> List<T> getList(ParameterizedCacheKey k);
/** /**
* Retrieves an unmodifiable list of generic caches managed by this service. Used in testing and management of the * Retrieves an unmodifiable list of generic caches managed by this service. Used in testing and management of the caches.
* caches.
* *
* @return list of caches for the current application * @return list of caches for the current application
*/ */
...@@ -55,6 +54,9 @@ public interface LoadingCacheManager { ...@@ -55,6 +54,9 @@ public interface LoadingCacheManager {
*/ */
public interface LoadingCacheProvider<T> { public interface LoadingCacheProvider<T> {
public static final String METRICS_REGION_NAME = "eclipse_precache_timing";
public static final String METRICS_KEY_TAG_NAME = "type";
/** /**
* Retrieves raw data given a cache key. * Retrieves raw data given a cache key.
* *
......
...@@ -21,6 +21,7 @@ import java.util.function.Predicate; ...@@ -21,6 +21,7 @@ import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.eclipse.microprofile.config.inject.ConfigProperty; import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
import org.eclipsefoundation.caching.exception.CacheCalculationException; import org.eclipsefoundation.caching.exception.CacheCalculationException;
import org.eclipsefoundation.caching.model.CacheWrapper; import org.eclipsefoundation.caching.model.CacheWrapper;
import org.eclipsefoundation.caching.model.ParameterizedCacheKey; import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
...@@ -32,6 +33,8 @@ import org.jboss.resteasy.specimpl.MultivaluedMapImpl; ...@@ -32,6 +33,8 @@ import org.jboss.resteasy.specimpl.MultivaluedMapImpl;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.aop.MeterTag;
import io.quarkus.cache.Cache; import io.quarkus.cache.Cache;
import io.quarkus.cache.CacheInvalidate; import io.quarkus.cache.CacheInvalidate;
import io.quarkus.cache.CacheInvalidateAll; import io.quarkus.cache.CacheInvalidateAll;
...@@ -40,12 +43,11 @@ import io.quarkus.cache.CacheName; ...@@ -40,12 +43,11 @@ import io.quarkus.cache.CacheName;
import io.quarkus.cache.CacheResult; import io.quarkus.cache.CacheResult;
import io.quarkus.cache.CaffeineCache; import io.quarkus.cache.CaffeineCache;
import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.ws.rs.core.MultivaluedMap; import jakarta.ws.rs.core.MultivaluedMap;
/** /**
* Utilizes Quarkus caching extensions in order to cache and retrieve data. Reason we augment over the Quarkus core * Utilizes Quarkus caching extensions in order to cache and retrieve data. Reason we augment over the Quarkus core cache (caffeine) is so
* cache (caffeine) is so that we can record TTLs for cache objects which aren't exposed by base cache. * that we can record TTLs for cache objects which aren't exposed by base cache.
* *
* @author Martin Lowe * @author Martin Lowe
* *
...@@ -54,16 +56,17 @@ import jakarta.ws.rs.core.MultivaluedMap; ...@@ -54,16 +56,17 @@ import jakarta.ws.rs.core.MultivaluedMap;
public class QuarkusCachingService implements CachingService { public class QuarkusCachingService implements CachingService {
private static final Logger LOGGER = LoggerFactory.getLogger(QuarkusCachingService.class); private static final Logger LOGGER = LoggerFactory.getLogger(QuarkusCachingService.class);
@ConfigProperty(name = CachingPropertyNames.CACHE_TTL_MAX_SECONDS, defaultValue = "900")
long ttlWrite; long ttlWrite;
@Inject
RequestWrapper wrapper; RequestWrapper wrapper;
@Inject
@CacheName("default")
Cache cache; Cache cache;
/**
 * Constructs the caching service with its required collaborators.
 *
 * @param wrapper the request wrapper used by this service
 * @param cache the Quarkus "default" cache instance
 * @param ttlWrite the max cache TTL; read from CACHE_TTL_MAX_SECONDS with a default of 900
 *        (converted to milliseconds by getMaxAge, so presumably seconds — confirm)
 */
public QuarkusCachingService(RequestWrapper wrapper, @CacheName("default") Cache cache,
@ConfigProperty(name = CachingPropertyNames.CACHE_TTL_MAX_SECONDS, defaultValue = "900") long ttlWrite) {
this.cache = cache;
this.wrapper = wrapper;
this.ttlWrite = ttlWrite;
}
@Override @Override
public <T> CacheWrapper<T> get(String id, MultivaluedMap<String, String> params, Class<?> rawType, Callable<? extends T> callable) { public <T> CacheWrapper<T> get(String id, MultivaluedMap<String, String> params, Class<?> rawType, Callable<? extends T> callable) {
Objects.requireNonNull(callable); Objects.requireNonNull(callable);
...@@ -143,7 +146,7 @@ public class QuarkusCachingService implements CachingService { ...@@ -143,7 +146,7 @@ public class QuarkusCachingService implements CachingService {
public long getMaxAge() { public long getMaxAge() {
return TimeUnit.MILLISECONDS.convert(ttlWrite, TimeUnit.SECONDS); return TimeUnit.MILLISECONDS.convert(ttlWrite, TimeUnit.SECONDS);
} }
@Override @Override
public Cache getCache() { public Cache getCache() {
return this.cache; return this.cache;
...@@ -166,11 +169,13 @@ public class QuarkusCachingService implements CachingService { ...@@ -166,11 +169,13 @@ public class QuarkusCachingService implements CachingService {
} }
} }
@Timed(value = "eclipse_cache_timing")
@CacheResult(cacheName = "default") @CacheResult(cacheName = "default")
<T> CacheWrapper<T> get(@CacheKey ParameterizedCacheKey cacheKey, Callable<? extends T> callable) { <T> CacheWrapper<T> get(@MeterTag(resolver = CacheKeyClassTagResolver.class) @CacheKey ParameterizedCacheKey cacheKey,
Callable<? extends T> callable) {
CacheWrapper.Builder<T> cacheWrap = CacheWrapper.builder(); CacheWrapper.Builder<T> cacheWrap = CacheWrapper.builder();
try{ try {
T data = callable.call(); T data = callable.call();
cacheWrap.setData(Optional.ofNullable(data)); cacheWrap.setData(Optional.ofNullable(data));
} catch (Exception e) { } catch (Exception e) {
...@@ -189,10 +194,10 @@ public class QuarkusCachingService implements CachingService { ...@@ -189,10 +194,10 @@ public class QuarkusCachingService implements CachingService {
LOGGER.debug("Timeout for {}: {}", cacheKey, System.currentTimeMillis() + getMaxAge()); LOGGER.debug("Timeout for {}: {}", cacheKey, System.currentTimeMillis() + getMaxAge());
return System.currentTimeMillis() + getMaxAge(); return System.currentTimeMillis() + getMaxAge();
} }
/** /**
* To prevent modification of maps/cache key entries post retrieval, we should be referencing copies of the maps rather * To prevent modification of maps/cache key entries post retrieval, we should be referencing copies of the maps rather than the direct
* than the direct passed reference for safety. * passed reference for safety.
* *
* @return the copied map, or an empty map if null * @return the copied map, or an empty map if null
*/ */
......
...@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches; ...@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches;
import java.util.List; import java.util.List;
import org.eclipse.microprofile.rest.client.inject.RestClient; import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
import org.eclipsefoundation.caching.model.ParameterizedCacheKey; import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider; import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
import org.eclipsefoundation.core.service.APIMiddleware; import org.eclipsefoundation.core.service.APIMiddleware;
...@@ -22,23 +23,28 @@ import org.eclipsefoundation.efservices.api.models.InterestGroup; ...@@ -22,23 +23,28 @@ import org.eclipsefoundation.efservices.api.models.InterestGroup;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.aop.MeterTag;
import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named; import jakarta.inject.Named;
@Named("interest-groups") @Named("interest-groups")
@ApplicationScoped @ApplicationScoped
public class InterestGroupPrecacheProvider implements LoadingCacheProvider<InterestGroup> { public class InterestGroupPrecacheProvider implements LoadingCacheProvider<InterestGroup> {
private static final Logger LOGGER = LoggerFactory.getLogger(InterestGroupPrecacheProvider.class); private static final Logger LOGGER = LoggerFactory.getLogger(InterestGroupPrecacheProvider.class);
@RestClient
ProjectsAPI projectAPI;
@Inject private ProjectsAPI projectAPI;
APIMiddleware middleware; private APIMiddleware middleware;
/**
 * Constructor injection of the collaborators used to fetch interest groups.
 *
 * @param projectAPI REST client for the projects API
 * @param middleware middleware used to retrieve the full result set in fetchData
 */
public InterestGroupPrecacheProvider(@RestClient ProjectsAPI projectAPI, APIMiddleware middleware) {
this.projectAPI = projectAPI;
this.middleware = middleware;
}
@Override @Override
public List<InterestGroup> fetchData(ParameterizedCacheKey k) { @Timed(value = METRICS_REGION_NAME)
public List<InterestGroup> fetchData(
@MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
LOGGER.debug("LOADING ALL IGS"); LOGGER.debug("LOADING ALL IGS");
return middleware.getAll(projectAPI::getInterestGroups, InterestGroup.class); return middleware.getAll(projectAPI::getInterestGroups, InterestGroup.class);
} }
......
...@@ -15,6 +15,7 @@ import java.util.List; ...@@ -15,6 +15,7 @@ import java.util.List;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.eclipse.microprofile.rest.client.inject.RestClient; import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
import org.eclipsefoundation.caching.model.ParameterizedCacheKey; import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider; import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
import org.eclipsefoundation.core.service.APIMiddleware; import org.eclipsefoundation.core.service.APIMiddleware;
...@@ -24,23 +25,28 @@ import org.eclipsefoundation.efservices.namespace.EfServicesParameterNames; ...@@ -24,23 +25,28 @@ import org.eclipsefoundation.efservices.namespace.EfServicesParameterNames;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.aop.MeterTag;
import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named; import jakarta.inject.Named;
@Named("projects") @Named("projects")
@ApplicationScoped @ApplicationScoped
public class ProjectPrecacheProvider implements LoadingCacheProvider<Project> { public class ProjectPrecacheProvider implements LoadingCacheProvider<Project> {
private static final Logger LOGGER = LoggerFactory.getLogger(ProjectPrecacheProvider.class); private static final Logger LOGGER = LoggerFactory.getLogger(ProjectPrecacheProvider.class);
@RestClient
ProjectsAPI projectAPI;
@Inject private ProjectsAPI projectAPI;
APIMiddleware middleware; private APIMiddleware middleware;
/**
 * Constructor injection of the collaborators used to fetch projects.
 *
 * @param projectAPI REST client for the projects API
 * @param middleware middleware used to retrieve results in fetchData
 */
public ProjectPrecacheProvider(@RestClient ProjectsAPI projectAPI, APIMiddleware middleware) {
this.projectAPI = projectAPI;
this.middleware = middleware;
}
@Override @Override
public List<Project> fetchData(ParameterizedCacheKey k) { @Timed(value = METRICS_REGION_NAME)
public List<Project> fetchData(
@MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k); LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k);
String specProjectParam = k.getParams().getFirst(EfServicesParameterNames.SPEC_PROJECT_RAW); String specProjectParam = k.getParams().getFirst(EfServicesParameterNames.SPEC_PROJECT_RAW);
int specProjectFlag = StringUtils.isNotBlank(specProjectParam) ? Integer.valueOf(specProjectParam) : 0; int specProjectFlag = StringUtils.isNotBlank(specProjectParam) ? Integer.valueOf(specProjectParam) : 0;
......
...@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches; ...@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches;
import java.util.List; import java.util.List;
import org.eclipse.microprofile.rest.client.inject.RestClient; import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
import org.eclipsefoundation.caching.model.ParameterizedCacheKey; import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider; import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
import org.eclipsefoundation.core.service.APIMiddleware; import org.eclipsefoundation.core.service.APIMiddleware;
...@@ -22,8 +23,9 @@ import org.eclipsefoundation.efservices.api.models.WorkingGroup; ...@@ -22,8 +23,9 @@ import org.eclipsefoundation.efservices.api.models.WorkingGroup;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.aop.MeterTag;
import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.inject.Named; import jakarta.inject.Named;
@Named("working-groups") @Named("working-groups")
...@@ -31,14 +33,18 @@ import jakarta.inject.Named; ...@@ -31,14 +33,18 @@ import jakarta.inject.Named;
public class WorkingGroupPrecacheProvider implements LoadingCacheProvider<WorkingGroup> { public class WorkingGroupPrecacheProvider implements LoadingCacheProvider<WorkingGroup> {
private static final Logger LOGGER = LoggerFactory.getLogger(WorkingGroupPrecacheProvider.class); private static final Logger LOGGER = LoggerFactory.getLogger(WorkingGroupPrecacheProvider.class);
@RestClient private WorkingGroupsAPI api;
WorkingGroupsAPI api; private APIMiddleware middleware;
@Inject public WorkingGroupPrecacheProvider(@RestClient WorkingGroupsAPI api, APIMiddleware middleware) {
APIMiddleware middleware; this.api = api;
this.middleware = middleware;
}
@Override @Override
public List<WorkingGroup> fetchData(ParameterizedCacheKey k) { @Timed(value = METRICS_REGION_NAME)
public List<WorkingGroup> fetchData(
@MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k); LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k);
return middleware.getAll(params -> api.get(params), WorkingGroup.class); return middleware.getAll(params -> api.get(params), WorkingGroup.class);
} }
......
...@@ -13,17 +13,15 @@ package org.eclipsefoundation.persistence.dao; ...@@ -13,17 +13,15 @@ package org.eclipsefoundation.persistence.dao;
import java.util.List; import java.util.List;
import jakarta.enterprise.event.Observes;
import org.eclipse.microprofile.health.HealthCheck; import org.eclipse.microprofile.health.HealthCheck;
import org.eclipsefoundation.persistence.dto.BareNode; import org.eclipsefoundation.persistence.dto.BareNode;
import org.eclipsefoundation.persistence.model.RDBMSQuery; import org.eclipsefoundation.persistence.model.RDBMSQuery;
import io.quarkus.runtime.StartupEvent; import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.event.Observes;
/** /**
* Interface for classes communicating with MongoDB. Assumes that reactive stream asynchronous calls are used rather * Interface for classes communicating with MongoDB. Assumes that reactive stream asynchronous calls are used rather than blocking methods.
* than blocking methods.
* *
* @author Martin Lowe * @author Martin Lowe
*/ */
...@@ -38,8 +36,7 @@ public interface PersistenceDao extends HealthCheck { ...@@ -38,8 +36,7 @@ public interface PersistenceDao extends HealthCheck {
<T extends BareNode> List<T> get(RDBMSQuery<T> q); <T extends BareNode> List<T> get(RDBMSQuery<T> q);
/** /**
* Adds a list of typed documents to the currently active database and schema, using the query object to access the * Adds a list of typed documents to the currently active database and schema, using the query object to access the document type.
* document type.
* *
* @param <T> the type of document to post * @param <T> the type of document to post
* @param q the query object for the current operation * @param q the query object for the current operation
...@@ -66,8 +63,8 @@ public interface PersistenceDao extends HealthCheck { ...@@ -66,8 +63,8 @@ public interface PersistenceDao extends HealthCheck {
Long count(RDBMSQuery<?> q); Long count(RDBMSQuery<?> q);
/** /**
* Retrieves a reference of an object to be used in operations on the server. This object is a proxy meant to help build * Retrieves a reference of an object to be used in operations on the server. This object is a proxy meant to help build FK
* FK relationships, but can be used in other operations as well. * relationships, but can be used in other operations as well.
* *
* @param id the ID of the object to retrieve * @param id the ID of the object to retrieve
* @param type the type of object that should be retrieved * @param type the type of object that should be retrieved
......
...@@ -12,11 +12,13 @@ ...@@ -12,11 +12,13 @@
**********************************************************************/ **********************************************************************/
package org.eclipsefoundation.persistence.dao.impl; package org.eclipsefoundation.persistence.dao.impl;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import org.eclipse.microprofile.config.ConfigProvider; import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipse.microprofile.health.HealthCheckResponse; import org.eclipse.microprofile.health.HealthCheckResponse;
import org.eclipsefoundation.persistence.dao.PersistenceDao; import org.eclipsefoundation.persistence.dao.PersistenceDao;
import org.eclipsefoundation.persistence.dto.BareNode; import org.eclipsefoundation.persistence.dto.BareNode;
...@@ -30,6 +32,8 @@ import org.hibernate.Session; ...@@ -30,6 +32,8 @@ import org.hibernate.Session;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import io.micrometer.core.instrument.Metrics;
import io.micrometer.core.instrument.Tags;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.ParameterMode; import jakarta.persistence.ParameterMode;
...@@ -47,6 +51,12 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -47,6 +51,12 @@ public abstract class BaseHibernateDao implements PersistenceDao {
private static final Logger LOGGER = LoggerFactory.getLogger(BaseHibernateDao.class); private static final Logger LOGGER = LoggerFactory.getLogger(BaseHibernateDao.class);
private static final int DEFAULT_MAX_LIMIT = 1000; private static final int DEFAULT_MAX_LIMIT = 1000;
private static final String METRIC_NAME = "eclipse_sql_time";
private static final String METRIC_DOCTYPE_TAG_NAME = "type";
private static final String METRIC_OPERATION_TAG_NAME = "operation";
@ConfigProperty(name = "quarkus.micrometer.enabled")
boolean isMetricsEnabled;
@Inject @Inject
EntityManager primaryConnectionManager; EntityManager primaryConnectionManager;
...@@ -57,9 +67,8 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -57,9 +67,8 @@ public abstract class BaseHibernateDao implements PersistenceDao {
public <T extends BareNode> List<T> get(RDBMSQuery<T> q) { public <T extends BareNode> List<T> get(RDBMSQuery<T> q) {
// set up the query, either for stored procedures or standard SQL // set up the query, either for stored procedures or standard SQL
Query query; Query query;
if (q.getFilter() instanceof ParameterizedCallStatement) { if (q.getFilter() instanceof ParameterizedCallStatement callStmt) {
query = getSecondaryEntityManager() query = getSecondaryEntityManager().createStoredProcedureQuery(callStmt.getCallStatement(), q.getDocType());
.createStoredProcedureQuery(((ParameterizedCallStatement) q.getFilter()).getCallStatement(), q.getDocType());
} else { } else {
query = getSecondaryEntityManager().createQuery(q.getFilter().getSelectSql(), q.getDocType()); query = getSecondaryEntityManager().createQuery(q.getFilter().getSelectSql(), q.getDocType());
if (LOGGER.isDebugEnabled()) { if (LOGGER.isDebugEnabled()) {
...@@ -76,6 +85,9 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -76,6 +85,9 @@ public abstract class BaseHibernateDao implements PersistenceDao {
query = query.setFirstResult(getOffset(q)).setMaxResults(getLimit(q)); query = query.setFirstResult(getOffset(q)).setMaxResults(getLimit(q));
} }
} }
// used for metrics if enabled, only capture actual SQL query op
Instant start = Instant.now();
// update ordinals for the request // update ordinals for the request
handleOrdinals(q, query); handleOrdinals(q, query);
// run the query and detach the results (stability w/ hibernate contexts) // run the query and detach the results (stability w/ hibernate contexts)
...@@ -84,6 +96,7 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -84,6 +96,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
if (r != null) if (r != null)
getSecondaryEntityManager().detach(r); getSecondaryEntityManager().detach(r);
}); });
recordMetric(q.getDocType(), "query", start);
return results; return results;
} }
...@@ -97,6 +110,8 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -97,6 +110,8 @@ public abstract class BaseHibernateDao implements PersistenceDao {
EntityManager em = getPrimaryEntityManager(); EntityManager em = getPrimaryEntityManager();
Session s = em.unwrap(Session.class); Session s = em.unwrap(Session.class);
// used for metrics if enabled, only capture actual SQL query op
Instant start = Instant.now();
// for each doc, check if update or create // for each doc, check if update or create
List<T> updatedDocs = new ArrayList<>(documents.size()); List<T> updatedDocs = new ArrayList<>(documents.size());
for (T doc : documents) { for (T doc : documents) {
...@@ -116,6 +131,7 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -116,6 +131,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
// add the ref to the output list // add the ref to the output list
updatedDocs.add(doc); updatedDocs.add(doc);
} }
recordMetric(q.getDocType(), "insert", start);
return updatedDocs; return updatedDocs;
} }
...@@ -129,11 +145,14 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -129,11 +145,14 @@ public abstract class BaseHibernateDao implements PersistenceDao {
// retrieve results for the given deletion query to delete using entity manager // retrieve results for the given deletion query to delete using entity manager
EntityManager em = getPrimaryEntityManager(); EntityManager em = getPrimaryEntityManager();
List<T> results = get(q); List<T> results = get(q);
// used for metrics if enabled, only capture actual SQL query op for deletion
Instant start = Instant.now();
if (results != null) { if (results != null) {
// remove all matched documents, merging to ensure we have attached versions // remove all matched documents, merging to ensure we have attached versions
// (transaction barriers sometimes break) // (transaction barriers sometimes break)
results.forEach(r -> em.remove(em.merge(r))); results.forEach(r -> em.remove(em.merge(r)));
} }
recordMetric(q.getDocType(), "delete", start);
} }
@Transactional @Transactional
...@@ -147,7 +166,12 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -147,7 +166,12 @@ public abstract class BaseHibernateDao implements PersistenceDao {
// build base query // build base query
TypedQuery<Long> query = em.createQuery(q.getFilter().getCountSql(), Long.class); TypedQuery<Long> query = em.createQuery(q.getFilter().getCountSql(), Long.class);
handleOrdinals(q, query); handleOrdinals(q, query);
return query.getSingleResult(); // used for metrics if enabled, only capture actual SQL query op
Instant start = Instant.now();
Long result = query.getSingleResult();
recordMetric(q.getDocType(), "count", start);
return result;
} }
@Override @Override
...@@ -234,8 +258,7 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -234,8 +258,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
LOGGER.debug("Found null reference in result list for type {}", clazz.getSimpleName()); LOGGER.debug("Found null reference in result list for type {}", clazz.getSimpleName());
} }
return out != null; return out != null;
}).collect(Collectors.toList()); }).toList();
} }
@Override @Override
...@@ -263,4 +286,15 @@ public abstract class BaseHibernateDao implements PersistenceDao { ...@@ -263,4 +286,15 @@ public abstract class BaseHibernateDao implements PersistenceDao {
protected EntityManager getSecondaryEntityManager() { protected EntityManager getSecondaryEntityManager() {
return getPrimaryEntityManager(); return getPrimaryEntityManager();
} }
/**
 * Records a timer sample for a completed SQL operation into the global Micrometer registry,
 * tagged with the target document type ("type") and the operation label ("operation").
 * No-op when micrometer metrics are disabled via configuration.
 *
 * @param doctype the entity class targeted by the operation; its simple name becomes the type tag
 * @param operation the SQL operation label (callers pass "query", "insert", "delete", or "count")
 * @param startingTime the instant captured before the SQL work began; elapsed time is measured to now
 */
private void recordMetric(Class<?> doctype, String operation, Instant startingTime) {
// if micrometer metrics enabled, capture the metrics for the SQL query
// required as the synthetic class registered by the deployment doesn't get properly metered via annotation
if (isMetricsEnabled) {
// using the global registry, fetch the timer and record the new time
Metrics.globalRegistry
.timer(METRIC_NAME, Tags.of(METRIC_DOCTYPE_TAG_NAME, doctype.getSimpleName(), METRIC_OPERATION_TAG_NAME, operation))
.record(Duration.between(startingTime, Instant.now()));
}
}
} }
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment