diff --git a/caching/src/main/java/org/eclipsefoundation/caching/config/CacheKeyClassTagResolver.java b/caching/src/main/java/org/eclipsefoundation/caching/config/CacheKeyClassTagResolver.java
new file mode 100644
index 0000000000000000000000000000000000000000..178fdddcb2e890a2321f6bd483119ea3ed7bbd3a
--- /dev/null
+++ b/caching/src/main/java/org/eclipsefoundation/caching/config/CacheKeyClassTagResolver.java
@@ -0,0 +1,19 @@
+package org.eclipsefoundation.caching.config;
+
+import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
+
+import io.micrometer.common.annotation.ValueResolver;
+import jakarta.inject.Singleton;
+
+@Singleton
+public class CacheKeyClassTagResolver implements ValueResolver {
+
+    @Override
+    public String resolve(Object parameter) {
+        if (parameter instanceof ParameterizedCacheKey castKey) {
+            return castKey.getClazz().getSimpleName();
+        }
+        return null;
+    }
+
+}
diff --git a/caching/src/main/java/org/eclipsefoundation/caching/service/LoadingCacheManager.java b/caching/src/main/java/org/eclipsefoundation/caching/service/LoadingCacheManager.java
index e77c5c746b0da53968c16ebdedb6c8a95896daa2..de2598df463b6a337e93164af08aa4f48190c161 100644
--- a/caching/src/main/java/org/eclipsefoundation/caching/service/LoadingCacheManager.java
+++ b/caching/src/main/java/org/eclipsefoundation/caching/service/LoadingCacheManager.java
@@ -17,8 +17,8 @@ import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
 import org.eclipsefoundation.caching.service.impl.DefaultLoadingCacheManager.LoadingCacheWrapper;
 
 /**
- * Interface for service that manages loading caches within applications. This will remove the burden of managing best
- * implementations and improve consistency when accessing these caches.
+ * Interface for service that manages loading caches within applications. This will remove the burden of managing best implementations and
+ * improve consistency when accessing these caches.
  * 
  * @author Martin Lowe
  *
@@ -30,8 +30,8 @@ public interface LoadingCacheManager {
      * Retrieves a list of cached results for the given key.
      * 
      * <p>
-     * If there is no cache value populated, it will be generated. If the value exists but is expired, then the cache will
-     * return the current value and begin recalculations in the background.
+     * If there is no cache value populated, it will be generated. If the value exists but is expired, then the cache will return the
+     * current value and begin recalculations in the background.
      * 
      * @param <T> the type of data returned by the given cache
      * @param k the key to use in cache lookups
@@ -40,8 +40,7 @@ public interface LoadingCacheManager {
     <T> List<T> getList(ParameterizedCacheKey k);
 
     /**
-     * Retrieves an unmodifiable list of generic caches managed by this service. Used in testing and management of the
-     * caches.
+     * Retrieves an unmodifiable list of generic caches managed by this service. Used in testing and management of the caches.
      * 
      * @return list of caches for the current application
      */
@@ -55,6 +54,9 @@ public interface LoadingCacheManager {
      */
     public interface LoadingCacheProvider<T> {
 
+        public static final String METRICS_REGION_NAME = "eclipse_precache_timing";
+        public static final String METRICS_KEY_TAG_NAME = "type";
+
         /**
          * Retrieves raw data given a cache key.
          * 
diff --git a/caching/src/main/java/org/eclipsefoundation/caching/service/impl/QuarkusCachingService.java b/caching/src/main/java/org/eclipsefoundation/caching/service/impl/QuarkusCachingService.java
index a0d95078bb1100366a251e78dfd9ac4a79df1193..881216b6d37cc341d479e93a6cc06c71e31b4e8d 100644
--- a/caching/src/main/java/org/eclipsefoundation/caching/service/impl/QuarkusCachingService.java
+++ b/caching/src/main/java/org/eclipsefoundation/caching/service/impl/QuarkusCachingService.java
@@ -21,6 +21,7 @@ import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 import org.eclipse.microprofile.config.inject.ConfigProperty;
+import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
 import org.eclipsefoundation.caching.exception.CacheCalculationException;
 import org.eclipsefoundation.caching.model.CacheWrapper;
 import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
@@ -32,6 +33,8 @@ import org.jboss.resteasy.specimpl.MultivaluedMapImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import io.micrometer.core.annotation.Timed;
+import io.micrometer.core.aop.MeterTag;
 import io.quarkus.cache.Cache;
 import io.quarkus.cache.CacheInvalidate;
 import io.quarkus.cache.CacheInvalidateAll;
@@ -40,12 +43,11 @@ import io.quarkus.cache.CacheName;
 import io.quarkus.cache.CacheResult;
 import io.quarkus.cache.CaffeineCache;
 import jakarta.enterprise.context.ApplicationScoped;
-import jakarta.inject.Inject;
 import jakarta.ws.rs.core.MultivaluedMap;
 
 /**
- * Utililzes Quarkus caching extensions in order to cache and retrieve data. Reason we augment over the Quarkus core
- * cache (caffeine) is so that we can record TTLs for cache objects which aren't exposed by base cache.
+ * Utilizes Quarkus caching extensions in order to cache and retrieve data. Reason we augment over the Quarkus core cache (caffeine) is so
+ * that we can record TTLs for cache objects which aren't exposed by base cache.
  * 
  * @author Martin Lowe
  *
@@ -54,16 +56,17 @@ import jakarta.ws.rs.core.MultivaluedMap;
 public class QuarkusCachingService implements CachingService {
     private static final Logger LOGGER = LoggerFactory.getLogger(QuarkusCachingService.class);
 
-    @ConfigProperty(name = CachingPropertyNames.CACHE_TTL_MAX_SECONDS, defaultValue = "900")
     long ttlWrite;
-
-    @Inject
     RequestWrapper wrapper;
-
-    @Inject
-    @CacheName("default")
     Cache cache;
 
+    public QuarkusCachingService(RequestWrapper wrapper, @CacheName("default") Cache cache,
+            @ConfigProperty(name = CachingPropertyNames.CACHE_TTL_MAX_SECONDS, defaultValue = "900") long ttlWrite) {
+        this.cache = cache;
+        this.wrapper = wrapper;
+        this.ttlWrite = ttlWrite;
+    }
+
     @Override
     public <T> CacheWrapper<T> get(String id, MultivaluedMap<String, String> params, Class<?> rawType, Callable<? extends T> callable) {
         Objects.requireNonNull(callable);
@@ -143,7 +146,7 @@ public class QuarkusCachingService implements CachingService {
     public long getMaxAge() {
         return TimeUnit.MILLISECONDS.convert(ttlWrite, TimeUnit.SECONDS);
     }
-    
+
     @Override
     public Cache getCache() {
         return this.cache;
@@ -166,11 +169,13 @@ public class QuarkusCachingService implements CachingService {
         }
     }
 
+    @Timed(value = "eclipse_cache_timing")
     @CacheResult(cacheName = "default")
-    <T> CacheWrapper<T> get(@CacheKey ParameterizedCacheKey cacheKey, Callable<? extends T> callable) {
+    <T> CacheWrapper<T> get(@MeterTag(resolver = CacheKeyClassTagResolver.class) @CacheKey ParameterizedCacheKey cacheKey,
+            Callable<? extends T> callable) {
 
         CacheWrapper.Builder<T> cacheWrap = CacheWrapper.builder();
-        try{
+        try {
             T data = callable.call();
             cacheWrap.setData(Optional.ofNullable(data));
         } catch (Exception e) {
@@ -189,10 +194,10 @@ public class QuarkusCachingService implements CachingService {
         LOGGER.debug("Timeout for {}: {}", cacheKey, System.currentTimeMillis() + getMaxAge());
         return System.currentTimeMillis() + getMaxAge();
     }
-    
+
     /**
-     * To prevent modification of maps/cache key entries post retrieval, we should be referencing copies of the maps rather
-     * than the direct passed reference for safety.
+     * To prevent modification of maps/cache key entries post retrieval, we should be referencing copies of the maps rather than the
+     * directly passed reference for safety.
      * 
      * @return the copied map, or an empty map if null
      */
diff --git a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/InterestGroupPrecacheProvider.java b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/InterestGroupPrecacheProvider.java
index e5865843a237b576601e71d1996d3e62b1496a1f..9250cef21685deed65276f94d30fd548395fdc16 100644
--- a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/InterestGroupPrecacheProvider.java
+++ b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/InterestGroupPrecacheProvider.java
@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches;
 import java.util.List;
 
 import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
 import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
 import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
 import org.eclipsefoundation.core.service.APIMiddleware;
@@ -22,23 +23,28 @@ import org.eclipsefoundation.efservices.api.models.InterestGroup;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import io.micrometer.core.annotation.Timed;
+import io.micrometer.core.aop.MeterTag;
 import jakarta.enterprise.context.ApplicationScoped;
-import jakarta.inject.Inject;
 import jakarta.inject.Named;
 
 @Named("interest-groups")
 @ApplicationScoped
 public class InterestGroupPrecacheProvider implements LoadingCacheProvider<InterestGroup> {
     private static final Logger LOGGER = LoggerFactory.getLogger(InterestGroupPrecacheProvider.class);
-    
-    @RestClient
-    ProjectsAPI projectAPI;
 
-    @Inject
-    APIMiddleware middleware;
+    private ProjectsAPI projectAPI;
+    private APIMiddleware middleware;
+
+    public InterestGroupPrecacheProvider(@RestClient ProjectsAPI projectAPI, APIMiddleware middleware) {
+        this.projectAPI = projectAPI;
+        this.middleware = middleware;
+    }
 
     @Override
-    public List<InterestGroup> fetchData(ParameterizedCacheKey k) {
+    @Timed(value = METRICS_REGION_NAME)
+    public List<InterestGroup> fetchData(
+            @MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
         LOGGER.debug("LOADING ALL IGS");
         return middleware.getAll(projectAPI::getInterestGroups, InterestGroup.class);
     }
diff --git a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/ProjectPrecacheProvider.java b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/ProjectPrecacheProvider.java
index de54da1171c5fb07368d9f11de98861c12dc55dc..7c8dee13fe9e7bb75cb96726371233feb1bb259c 100644
--- a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/ProjectPrecacheProvider.java
+++ b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/ProjectPrecacheProvider.java
@@ -15,6 +15,7 @@ import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
 import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
 import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
 import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
 import org.eclipsefoundation.core.service.APIMiddleware;
@@ -24,23 +25,28 @@ import org.eclipsefoundation.efservices.namespace.EfServicesParameterNames;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import io.micrometer.core.annotation.Timed;
+import io.micrometer.core.aop.MeterTag;
 import jakarta.enterprise.context.ApplicationScoped;
-import jakarta.inject.Inject;
 import jakarta.inject.Named;
 
 @Named("projects")
 @ApplicationScoped
 public class ProjectPrecacheProvider implements LoadingCacheProvider<Project> {
     private static final Logger LOGGER = LoggerFactory.getLogger(ProjectPrecacheProvider.class);
-    
-    @RestClient
-    ProjectsAPI projectAPI;
 
-    @Inject
-    APIMiddleware middleware;
+    private ProjectsAPI projectAPI;
+    private APIMiddleware middleware;
+
+    public ProjectPrecacheProvider(@RestClient ProjectsAPI projectAPI, APIMiddleware middleware) {
+        this.projectAPI = projectAPI;
+        this.middleware = middleware;
+    }
 
     @Override
-    public List<Project> fetchData(ParameterizedCacheKey k) {
+    @Timed(value = METRICS_REGION_NAME)
+    public List<Project> fetchData(
+            @MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
         LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k);
         String specProjectParam = k.getParams().getFirst(EfServicesParameterNames.SPEC_PROJECT_RAW);
         int specProjectFlag = StringUtils.isNotBlank(specProjectParam) ? Integer.valueOf(specProjectParam) : 0;
diff --git a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/WorkingGroupPrecacheProvider.java b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/WorkingGroupPrecacheProvider.java
index f94b814c6b80a5f575949c5af31d193e29c85026..0149571ef4174f9a85b5c9e0b8d212f2e587c83a 100644
--- a/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/WorkingGroupPrecacheProvider.java
+++ b/efservices/src/main/java/org/eclipsefoundation/efservices/precaches/WorkingGroupPrecacheProvider.java
@@ -14,6 +14,7 @@ package org.eclipsefoundation.efservices.precaches;
 import java.util.List;
 
 import org.eclipse.microprofile.rest.client.inject.RestClient;
+import org.eclipsefoundation.caching.config.CacheKeyClassTagResolver;
 import org.eclipsefoundation.caching.model.ParameterizedCacheKey;
 import org.eclipsefoundation.caching.service.LoadingCacheManager.LoadingCacheProvider;
 import org.eclipsefoundation.core.service.APIMiddleware;
@@ -22,8 +23,9 @@ import org.eclipsefoundation.efservices.api.models.WorkingGroup;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import io.micrometer.core.annotation.Timed;
+import io.micrometer.core.aop.MeterTag;
 import jakarta.enterprise.context.ApplicationScoped;
-import jakarta.inject.Inject;
 import jakarta.inject.Named;
 
 @Named("working-groups")
@@ -31,14 +33,18 @@ import jakarta.inject.Named;
 public class WorkingGroupPrecacheProvider implements LoadingCacheProvider<WorkingGroup> {
     private static final Logger LOGGER = LoggerFactory.getLogger(WorkingGroupPrecacheProvider.class);
 
-    @RestClient
-    WorkingGroupsAPI api;
+    private WorkingGroupsAPI api;
+    private APIMiddleware middleware;
 
-    @Inject
-    APIMiddleware middleware;
+    public WorkingGroupPrecacheProvider(@RestClient WorkingGroupsAPI api, APIMiddleware middleware) {
+        this.api = api;
+        this.middleware = middleware;
+    }
 
     @Override
-    public List<WorkingGroup> fetchData(ParameterizedCacheKey k) {
+    @Timed(value = METRICS_REGION_NAME)
+    public List<WorkingGroup> fetchData(
+            @MeterTag(resolver = CacheKeyClassTagResolver.class, key = METRICS_KEY_TAG_NAME) ParameterizedCacheKey k) {
         LOGGER.debug("LOADING PROJECTS WITH KEY: {}", k);
         return middleware.getAll(params -> api.get(params), WorkingGroup.class);
     }
diff --git a/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/PersistenceDao.java b/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/PersistenceDao.java
index 7c2ee9c1e2cde949dd791ca6b2434ff1b5467407..ac116ee1a388d197c9bd5dd867b107d61693c79f 100644
--- a/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/PersistenceDao.java
+++ b/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/PersistenceDao.java
@@ -13,17 +13,15 @@ package org.eclipsefoundation.persistence.dao;
 
 import java.util.List;
 
-import jakarta.enterprise.event.Observes;
-
 import org.eclipse.microprofile.health.HealthCheck;
 import org.eclipsefoundation.persistence.dto.BareNode;
 import org.eclipsefoundation.persistence.model.RDBMSQuery;
 
 import io.quarkus.runtime.StartupEvent;
+import jakarta.enterprise.event.Observes;
 
 /**
- * Interface for classes communicating with MongoDB. Assumes that reactive stream asynchronous calls are used rather
- * than blocking methods.
+ * Interface for classes communicating with the backing RDBMS. Assumes that reactive stream asynchronous calls are used rather than blocking methods.
  * 
  * @author Martin Lowe
  */
@@ -38,8 +36,7 @@ public interface PersistenceDao extends HealthCheck {
     <T extends BareNode> List<T> get(RDBMSQuery<T> q);
 
     /**
-     * Adds a list of typed documents to the currently active database and schema, using the query object to access the
-     * document type.
+     * Adds a list of typed documents to the currently active database and schema, using the query object to access the document type.
      * 
      * @param <T> the type of document to post
      * @param q the query object for the current operation
@@ -66,8 +63,8 @@ public interface PersistenceDao extends HealthCheck {
     Long count(RDBMSQuery<?> q);
 
     /**
-     * Retrieves a reference of an object to be used in operations on the server. This object is a proxy meant to help build
-     * FK relationships, but can be used in other operations as well.
+     * Retrieves a reference of an object to be used in operations on the server. This object is a proxy meant to help build FK
+     * relationships, but can be used in other operations as well.
      * 
      * @param id the ID of the object to retrieve
      * @param type the type of object that should be retrieved
diff --git a/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/impl/BaseHibernateDao.java b/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/impl/BaseHibernateDao.java
index 6fa1c9efdd6551573f31d91d0797afd407541b05..28051d853186134af778ae254f9903dd322493dc 100644
--- a/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/impl/BaseHibernateDao.java
+++ b/persistence/runtime/src/main/java/org/eclipsefoundation/persistence/dao/impl/BaseHibernateDao.java
@@ -12,11 +12,13 @@
 **********************************************************************/
 package org.eclipsefoundation.persistence.dao.impl;
 
+import java.time.Duration;
+import java.time.Instant;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;
 
 import org.eclipse.microprofile.config.ConfigProvider;
+import org.eclipse.microprofile.config.inject.ConfigProperty;
 import org.eclipse.microprofile.health.HealthCheckResponse;
 import org.eclipsefoundation.persistence.dao.PersistenceDao;
 import org.eclipsefoundation.persistence.dto.BareNode;
@@ -30,6 +32,8 @@ import org.hibernate.Session;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import io.micrometer.core.instrument.Metrics;
+import io.micrometer.core.instrument.Tags;
 import jakarta.inject.Inject;
 import jakarta.persistence.EntityManager;
 import jakarta.persistence.ParameterMode;
@@ -47,6 +51,12 @@ public abstract class BaseHibernateDao implements PersistenceDao {
     private static final Logger LOGGER = LoggerFactory.getLogger(BaseHibernateDao.class);
 
     private static final int DEFAULT_MAX_LIMIT = 1000;
+    private static final String METRIC_NAME = "eclipse_sql_time";
+    private static final String METRIC_DOCTYPE_TAG_NAME = "type";
+    private static final String METRIC_OPERATION_TAG_NAME = "operation";
+
+    @ConfigProperty(name = "quarkus.micrometer.enabled")
+    boolean isMetricsEnabled;
 
     @Inject
     EntityManager primaryConnectionManager;
@@ -57,9 +67,8 @@ public abstract class BaseHibernateDao implements PersistenceDao {
     public <T extends BareNode> List<T> get(RDBMSQuery<T> q) {
         // set up the query, either for stored procedures or standard SQL
         Query query;
-        if (q.getFilter() instanceof ParameterizedCallStatement) {
-            query = getSecondaryEntityManager()
-                    .createStoredProcedureQuery(((ParameterizedCallStatement) q.getFilter()).getCallStatement(), q.getDocType());
+        if (q.getFilter() instanceof ParameterizedCallStatement callStmt) {
+            query = getSecondaryEntityManager().createStoredProcedureQuery(callStmt.getCallStatement(), q.getDocType());
         } else {
             query = getSecondaryEntityManager().createQuery(q.getFilter().getSelectSql(), q.getDocType());
             if (LOGGER.isDebugEnabled()) {
@@ -76,6 +85,9 @@ public abstract class BaseHibernateDao implements PersistenceDao {
                 query = query.setFirstResult(getOffset(q)).setMaxResults(getLimit(q));
             }
         }
+
+        // used for metrics if enabled, only capture actual SQL query op
+        Instant start = Instant.now();
         // update ordinals for the request
         handleOrdinals(q, query);
         // run the query and detach the results (stability w/ hibernate contexts)
@@ -84,6 +96,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
             if (r != null)
                 getSecondaryEntityManager().detach(r);
         });
+        recordMetric(q.getDocType(), "query", start);
         return results;
     }
 
@@ -97,6 +110,8 @@ public abstract class BaseHibernateDao implements PersistenceDao {
         EntityManager em = getPrimaryEntityManager();
         Session s = em.unwrap(Session.class);
 
+        // used for metrics if enabled, only capture actual SQL query op
+        Instant start = Instant.now();
         // for each doc, check if update or create
         List<T> updatedDocs = new ArrayList<>(documents.size());
         for (T doc : documents) {
@@ -116,6 +131,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
             // add the ref to the output list
             updatedDocs.add(doc);
         }
+        recordMetric(q.getDocType(), "insert", start);
         return updatedDocs;
     }
 
@@ -129,11 +145,14 @@ public abstract class BaseHibernateDao implements PersistenceDao {
         // retrieve results for the given deletion query to delete using entity manager
         EntityManager em = getPrimaryEntityManager();
         List<T> results = get(q);
+        // used for metrics if enabled, only capture actual SQL query op for deletion
+        Instant start = Instant.now();
         if (results != null) {
             // remove all matched documents, merging to ensure we have attached versions
             // (transaction barriers sometimes break)
             results.forEach(r -> em.remove(em.merge(r)));
         }
+        recordMetric(q.getDocType(), "delete", start);
     }
 
     @Transactional
@@ -147,7 +166,12 @@ public abstract class BaseHibernateDao implements PersistenceDao {
         // build base query
         TypedQuery<Long> query = em.createQuery(q.getFilter().getCountSql(), Long.class);
         handleOrdinals(q, query);
-        return query.getSingleResult();
+        // used for metrics if enabled, only capture actual SQL query op
+        Instant start = Instant.now();
+
+        Long result = query.getSingleResult();
+        recordMetric(q.getDocType(), "count", start);
+        return result;
     }
 
     @Override
@@ -234,8 +258,7 @@ public abstract class BaseHibernateDao implements PersistenceDao {
                 LOGGER.debug("Found null reference in result list for type {}", clazz.getSimpleName());
             }
             return out != null;
-        }).collect(Collectors.toList());
-
+        }).toList();
     }
 
     @Override
@@ -263,4 +286,15 @@ public abstract class BaseHibernateDao implements PersistenceDao {
     protected EntityManager getSecondaryEntityManager() {
         return getPrimaryEntityManager();
     }
+
+    private void recordMetric(Class<?> doctype, String operation, Instant startingTime) {
+        // if micrometer metrics enabled, capture the metrics for the SQL query
+        // required as the synthetic class registered by the deployment doesn't get properly metered via annotation
+        if (isMetricsEnabled) {
+            // using the global registry, fetch the timer and record the new time
+            Metrics.globalRegistry
+                    .timer(METRIC_NAME, Tags.of(METRIC_DOCTYPE_TAG_NAME, doctype.getSimpleName(), METRIC_OPERATION_TAG_NAME, operation))
+                    .record(Duration.between(startingTime, Instant.now()));
+        }
+    }
 }