diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java index a94cc8a..234c78c 100644 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java @@ -25,6 +25,7 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.sql.DataSource; @@ -68,6 +69,7 @@ import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.stats.Clock; +import org.apache.jackrabbit.oak.stats.StatisticsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -512,6 +514,7 @@ public class DocumentMK { private String persistentCacheURI = DEFAULT_PERSISTENT_CACHE_URI; private PersistentCache persistentCache; private LeaseFailureHandler leaseFailureHandler; + private StatisticsProvider statisticsProvider = StatisticsProvider.NOOP; public Builder() { } @@ -851,6 +854,16 @@ public class DocumentMK { return this; } + public Builder setStatisticsProvider(StatisticsProvider provider) { + this.statisticsProvider = provider; + return this; + } + + @CheckForNull + public StatisticsProvider getStatisticsProvider() { + return this.statisticsProvider; + } + public Builder clock(Clock clock) { this.clock = clock; return this; diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java index 98e1e6a..0d0ce4d 100644 --- 
a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java @@ -78,6 +78,7 @@ import org.apache.jackrabbit.oak.spi.whiteboard.Registration; import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard; import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardExecutor; import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils; +import org.apache.jackrabbit.oak.stats.StatisticsProvider; import org.osgi.framework.Bundle; import org.osgi.framework.BundleException; import org.osgi.framework.Constants; @@ -255,6 +256,12 @@ public class DocumentNodeStoreService { ) private volatile DataSource blobDataSource; + @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY, + policy = ReferencePolicy.STATIC, + referenceInterface = StatisticsProvider.class + ) + private volatile StatisticsProvider statisticsProvider; + private DocumentMK mk; private ObserverTracker observerTracker; private ComponentContext context; @@ -366,6 +373,7 @@ public class DocumentNodeStoreService { diffCachePercentage). setCacheSegmentCount(cacheSegmentCount). setCacheStackMoveDistance(cacheStackMoveDistance). + setStatisticsProvider(this.statisticsProvider). setLeaseCheck(true /* OAK-2739: enabled by default */). setLeaseFailureHandler(new LeaseFailureHandler() { diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStats.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStats.java new file mode 100644 index 0000000..7eb7378 --- /dev/null +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStats.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document; + +import org.apache.felix.scr.annotations.Reference; +import org.apache.jackrabbit.oak.stats.StatisticsProvider; +import org.apache.jackrabbit.oak.stats.TimerStats; +import org.apache.jackrabbit.oak.stats.TimerStats.Context; + +import javax.annotation.Nonnull; +import java.util.EnumMap; +import java.util.Map; + +/** + * Document Store statistics helper class. + */ +public class DocumentStoreStats { + + private final StatisticsProvider statisticsProvider; + + private Map timers; + + /** + * All available DocumentStore metrics. + */ + public enum Metric { + FIND("find"), + QUERY("query"), + REMOVE("delete"), + CREATE("create"), + UPDATE("update"), + CREATE_OR_UPDATE("createOrUpdate"), + FIND_AND_MODIFY("findAndModify"); + + private String name; + + Metric(String metricName) { + this.name = metricName; + } + + public String getFullName() { + return DocumentStoreStats.class.getCanonicalName() + "." + name; + } + }; + + public DocumentStoreStats(StatisticsProvider provider) { + this.statisticsProvider = provider != null ? provider : StatisticsProvider.NOOP; + this.timers = fetchTimers(this.statisticsProvider); + } + + public DocumentStoreStats(StatisticsProvider provider, String statsNamePrefix) { + this.statisticsProvider = provider != null ? 
provider : StatisticsProvider.NOOP; + this.timers = fetchTimers(this.statisticsProvider); + } + + @Nonnull + public Context startTimer(Collection collection, Metric metric) { + return this.timers.get(metric).time(); + } + + @Nonnull + public Context startTimer(Collection collection, String documentId, Metric metric) { + return startTimer(collection, metric); + } + + @Nonnull + public Context startTimer(Collection collection, + String fromDocId, + String toDocId, + Metric metric) { + return startTimer(collection, metric); + } + + @Nonnull + private Map fetchTimers(StatisticsProvider provider) { + if (provider == null) { + provider = StatisticsProvider.NOOP; + } + + Map timerMap = new EnumMap(Metric.class); + String name; + TimerStats timer; + + for (Metric m: Metric.values()) { + name = m.getFullName(); + timer = provider.getTimer(name); + timerMap.put(m, timer); + } + return timerMap; + } +} diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java index e6cf30a..bb18260 100644 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java @@ -53,6 +53,7 @@ import org.apache.jackrabbit.oak.plugins.document.Document; import org.apache.jackrabbit.oak.plugins.document.DocumentMK; import org.apache.jackrabbit.oak.plugins.document.DocumentStore; import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException; +import org.apache.jackrabbit.oak.plugins.document.DocumentStoreStats; import org.apache.jackrabbit.oak.plugins.document.JournalEntry; import org.apache.jackrabbit.oak.plugins.document.NodeDocument; import org.apache.jackrabbit.oak.plugins.document.Revision; @@ -67,6 +68,7 @@ import org.apache.jackrabbit.oak.plugins.document.cache.NodeDocumentCache; import 
org.apache.jackrabbit.oak.plugins.document.locks.TreeNodeDocumentLocks; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.apache.jackrabbit.oak.stats.Clock; +import org.apache.jackrabbit.oak.stats.TimerStats.Context; import org.apache.jackrabbit.oak.util.PerfLogger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -116,6 +118,8 @@ public class MongoDocumentStore implements DocumentStore { private final TreeNodeDocumentLocks nodeLocks; + private final DocumentStoreStats stats; + private Clock clock = Clock.SIMPLE; private final long maxReplicationLagMillis; @@ -176,6 +180,8 @@ public class MongoDocumentStore implements DocumentStore { settings = db.getCollection(Collection.SETTINGS.toString()); journal = db.getCollection(Collection.JOURNAL.toString()); + stats = new DocumentStoreStats(builder.getStatisticsProvider()); + maxReplicationLagMillis = builder.getMaxReplicationLagMillis(); // indexes: @@ -406,6 +412,21 @@ public class MongoDocumentStore implements DocumentStore { throw new DocumentStoreException("Failed to load document with " + key, t); } + + @CheckForNull + private T findUncachedWithRetry( + Collection collection, String key, + DocumentReadPreference docReadPref, + int retries) { + Context ctx = this.stats.startTimer(collection, key, DocumentStoreStats.Metric.FIND); + try { + return findUncachedWithRetryImpl(collection, key, docReadPref, retries); + } + finally { + ctx.stop(); + } + } + /** * Finds a document and performs a number of retries if the read fails with * an exception. @@ -418,7 +439,7 @@ public class MongoDocumentStore implements DocumentStore { * @return the document or {@code null} if the document doesn't exist. 
*/ @CheckForNull - private T findUncachedWithRetry( + private T findUncachedWithRetryImpl( Collection collection, String key, DocumentReadPreference docReadPref, int retries) { @@ -504,6 +525,22 @@ public class MongoDocumentStore implements DocumentStore { String indexedProperty, long startValue, int limit) { + Context ctx = this.stats.startTimer(collection, fromKey, toKey, DocumentStoreStats.Metric.QUERY); + try { + return queryImpl(collection, fromKey, toKey, indexedProperty, startValue, limit); + } + finally { + ctx.stop(); + } + } + + @Nonnull + private List queryImpl(Collection collection, + String fromKey, + String toKey, + String indexedProperty, + long startValue, + int limit) { boolean withLock = true; if (collection == Collection.NODES && indexedProperty != null) { long maxQueryTime; @@ -621,6 +658,16 @@ public class MongoDocumentStore implements DocumentStore { @Override public void remove(Collection collection, String key) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { + removeImpl(collection, key); + } + finally { + ctx.stop(); + } + } + + private void removeImpl(Collection collection, String key) { log("remove", key); DBCollection dbCollection = getDBCollection(collection); long start = PERFLOG.start(); @@ -633,9 +680,18 @@ public class MongoDocumentStore implements DocumentStore { PERFLOG.end(start, 1, "remove key={}", key); } } - @Override public void remove(Collection collection, List keys) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { + removeImpl(collection, keys); + } + finally { + ctx.stop(); + } + } + + private void removeImpl(Collection collection, List keys) { log("remove", keys); DBCollection dbCollection = getDBCollection(collection); long start = PERFLOG.start(); @@ -662,6 +718,17 @@ public class MongoDocumentStore implements DocumentStore { @Override public int remove(Collection collection, Map> toRemove) { + Context ctx = 
this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { + return removeImpl(collection, toRemove); + } + finally { + ctx.stop(); + } + } + + private int removeImpl(Collection collection, + Map> toRemove) { log("remove", toRemove); int num = 0; DBCollection dbCollection = getDBCollection(collection); @@ -784,24 +851,46 @@ public class MongoDocumentStore implements DocumentStore { @Override public T createOrUpdate(Collection collection, UpdateOp update) throws DocumentStoreException { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.CREATE_OR_UPDATE); + try { log("createOrUpdate", update); UpdateUtils.assertUnconditional(update); T doc = findAndModify(collection, update, true, false); log("createOrUpdate returns ", doc); return doc; } + finally { + ctx.stop(); + } + } @Override public T findAndUpdate(Collection collection, UpdateOp update) throws DocumentStoreException { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.FIND_AND_MODIFY); + try { log("findAndUpdate", update); T doc = findAndModify(collection, update, false, true); log("findAndUpdate returns ", doc); return doc; } + finally { + ctx.stop(); + } + } @Override public boolean create(Collection collection, List updateOps) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.CREATE); + try { + return createImpl(collection, updateOps); + } + finally { + ctx.stop(); + } + } + + private boolean createImpl(Collection collection, List updateOps) { log("create", updateOps); List docs = new ArrayList(); DBObject[] inserts = new DBObject[updateOps.size()]; @@ -873,11 +962,22 @@ public class MongoDocumentStore implements DocumentStore { PERFLOG.end(start, 1, "create"); } } - @Override public void update(Collection collection, List keys, UpdateOp updateOp) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.UPDATE); + try { + updateImpl(collection, keys, updateOp); + } + finally { + ctx.stop(); 
+ } + } + + private void updateImpl(Collection collection, + List keys, + UpdateOp updateOp) { log("update", keys, updateOp); UpdateUtils.assertUnconditional(updateOp); DBCollection dbCollection = getDBCollection(collection); diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java index 098dce9..9f072ab 100755 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java @@ -74,6 +74,8 @@ import org.apache.jackrabbit.oak.plugins.document.cache.NodeDocumentCache; import org.apache.jackrabbit.oak.plugins.document.locks.NodeDocumentLocks; import org.apache.jackrabbit.oak.plugins.document.locks.StripedNodeDocumentLocks; import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentStoreStats; +import org.apache.jackrabbit.oak.stats.TimerStats.Context; import org.apache.jackrabbit.oak.util.OakVersion; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -193,6 +195,8 @@ import com.google.common.hash.PrimitiveSink; */ public class RDBDocumentStore implements DocumentStore { + private DocumentStoreStats stats; + /** * Creates a {@linkplain RDBDocumentStore} instance using the provided * {@link DataSource}, {@link DocumentMK.Builder}, and {@link RDBOptions}. 
@@ -221,6 +225,7 @@ public class RDBDocumentStore implements DocumentStore { @Override public T find(final Collection collection, final String id, int maxCacheAge) { + // Profile only readDocumentUncached return readDocumentCached(collection, id, maxCacheAge); } @@ -234,12 +239,18 @@ public class RDBDocumentStore implements DocumentStore { @Override public List query(Collection collection, String fromKey, String toKey, String indexedProperty, long startValue, int limit) { + Context ctx = this.stats.startTimer(collection, fromKey, toKey, DocumentStoreStats.Metric.QUERY); + try { List conditions = Collections.emptyList(); if (indexedProperty != null) { conditions = Collections.singletonList(new QueryCondition(indexedProperty, ">=", startValue)); } return internalQuery(collection, fromKey, toKey, EMPTY_KEY_PATTERN, conditions, limit); } + finally { + ctx.stop(); + } + } @Nonnull protected List query(Collection collection, String fromKey, String toKey, @@ -249,49 +260,92 @@ public class RDBDocumentStore implements DocumentStore { @Override public void remove(Collection collection, String id) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { delete(collection, id); invalidateCache(collection, id, true); } + finally { + ctx.stop(); + } + } @Override public void remove(Collection collection, List ids) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { for (String id : ids) { invalidateCache(collection, id, true); } delete(collection, ids); } + finally { + ctx.stop(); + } + } @Override public int remove(Collection collection, Map> toRemove) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.REMOVE); + try { int num = delete(collection, toRemove); for (String id : toRemove.keySet()) { invalidateCache(collection, id, true); } return num; } + finally { + ctx.stop(); + } + } @Override public boolean create(Collection collection, List updateOps) { + Context ctx = 
this.stats.startTimer(collection, DocumentStoreStats.Metric.CREATE); + try { return internalCreate(collection, updateOps); } + finally { + ctx.stop(); + } + } @Override public void update(Collection collection, List keys, UpdateOp updateOp) { UpdateUtils.assertUnconditional(updateOp); + + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.UPDATE); + try { internalUpdate(collection, keys, updateOp); } + finally { + ctx.stop(); + } + } @Override public T createOrUpdate(Collection collection, UpdateOp update) { UpdateUtils.assertUnconditional(update); + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.CREATE_OR_UPDATE); + try { return internalCreateOrUpdate(collection, update, true, false); } + finally { + ctx.stop(); + } + } @Override public T findAndUpdate(Collection collection, UpdateOp update) { + Context ctx = this.stats.startTimer(collection, DocumentStoreStats.Metric.FIND_AND_MODIFY); + try { return internalCreateOrUpdate(collection, update, false, true); } + finally { + ctx.stop(); + } + } @Override public CacheInvalidationStats invalidateCache() { @@ -528,6 +582,8 @@ public class RDBDocumentStore implements DocumentStore { private void initialize(DataSource ds, DocumentMK.Builder builder, RDBOptions options) throws Exception { + this.stats = new DocumentStoreStats(builder.getStatisticsProvider()); + this.tableMeta.put(Collection.NODES, new RDBTableMetaData(createTableName(options.getTablePrefix(), TABLEMAP.get(Collection.NODES)))); this.tableMeta.put(Collection.CLUSTER_NODES, @@ -1272,6 +1328,21 @@ public class RDBDocumentStore implements DocumentStore { @CheckForNull private T readDocumentUncached(Collection collection, String id, NodeDocument cachedDoc) { + Context ctx = this.stats.startTimer(collection, id, DocumentStoreStats.Metric.FIND); + try { + return readDocumentUncachedImpl(collection, id, cachedDoc); + } + finally { + ctx.stop(); + } + } + + /** + * The real read document action. 
This method should not be used. Use the readDocumentUncached wrapper that also + * profiles the method call. + */ + @CheckForNull + private T readDocumentUncachedImpl(Collection collection, String id, NodeDocument cachedDoc) { Connection connection = null; RDBTableMetaData tmd = getTable(collection); try { diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/metric/StatisticsProviderFactory.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/metric/StatisticsProviderFactory.java index 102d532..b55cfc8 100644 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/metric/StatisticsProviderFactory.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/metric/StatisticsProviderFactory.java @@ -66,6 +66,7 @@ public class StatisticsProviderFactory { "com.codahale.metrics.MetricRegistry"; @Property(value = TYPE_AUTO, options = { + @PropertyOption(name = TYPE_AUTO, value = TYPE_AUTO), @PropertyOption(name = TYPE_DEFAULT, value = TYPE_DEFAULT), @PropertyOption(name = TYPE_METRIC, value = TYPE_METRIC), @PropertyOption(name = TYPE_NONE, value = TYPE_NONE)}) diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStatsTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStatsTest.java new file mode 100644 index 0000000..5bd379d --- /dev/null +++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/DocumentStoreStatsTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document; + +import org.apache.jackrabbit.oak.stats.DefaultStatisticsProvider; +import org.apache.jackrabbit.oak.stats.StatisticsProvider; +import org.apache.jackrabbit.oak.stats.TimerStats; +import org.junit.Test; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +import static org.junit.Assert.assertNotNull; + +public class DocumentStoreStatsTest { + + private static final Collection[] COLLECTIONS = { + Collection.NODES, + Collection.CLUSTER_NODES, + Collection.JOURNAL, + Collection.SETTINGS + }; + + @Test + public void basicSetup() throws Exception { + DocumentStoreStats stats = new DocumentStoreStats(createDefaultStatisticsProvider()); + TimerStats.Context ctx; + + for(Collection c: COLLECTIONS) { + for(DocumentStoreStats.Metric m: DocumentStoreStats.Metric.values()) { + ctx = stats.startTimer(c, m); + assertNotNull(ctx); + ctx.stop(); + } + } + } + + @Test + public void metricNames() throws Exception { + for (DocumentStoreStats.Metric m: DocumentStoreStats.Metric.values()) { + assertNotNull(m.getFullName()); + } + } + + private StatisticsProvider createDefaultStatisticsProvider() { + ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); + DefaultStatisticsProvider defaultStatisticsProvider = new DefaultStatisticsProvider(executorService); + + return defaultStatisticsProvider; + } +}