Index: oak-auth-external/pom.xml
===================================================================
--- oak-auth-external/pom.xml	(revision 1633286)
+++ oak-auth-external/pom.xml	(working copy)
@@ -161,7 +161,7 @@
       com.h2database
       h2
-      1.3.175
+      1.4.182
       test
Index: oak-core/pom.xml
===================================================================
--- oak-core/pom.xml	(revision 1633090)
+++ oak-core/pom.xml	(working copy)
@@ -289,7 +289,7 @@
       com.h2database
       h2
-      1.3.175
+      1.4.182
       true
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java	(revision 1633090)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentMK.java	(working copy)
@@ -33,6 +33,7 @@
 import org.apache.jackrabbit.mk.api.MicroKernel;
 import org.apache.jackrabbit.mk.api.MicroKernelException;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
 import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore;
 import org.apache.jackrabbit.oak.commons.json.JsopReader;
 import org.apache.jackrabbit.oak.commons.json.JsopStream;
@@ -46,14 +47,27 @@
 import org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobStore;
 import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
 import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
+import org.apache.jackrabbit.oak.plugins.document.persistentCache.CacheType;
+import org.apache.jackrabbit.oak.plugins.document.persistentCache.PersistentCache;
 import org.apache.jackrabbit.oak.plugins.document.rdb.RDBBlobStore;
 import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore;
+import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
 import org.apache.jackrabbit.oak.stats.Clock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 /**
  * A MicroKernel implementation that stores the data in a {@link DocumentStore}.
  */
 public class DocumentMK implements MicroKernel {
+
+    static final Logger LOG = LoggerFactory.getLogger(DocumentMK.class);
+
+    /**
+     * The path where the persistent cache is stored.
+     */
+    static final String PERSISTENT_CACHE =
+            System.getProperty("oak.documentMK.persCache");

     /**
      * The threshold where special handling for many child node starts.
@@ -468,6 +482,7 @@ private boolean disableBranches; private Clock clock = Clock.SIMPLE; private Executor executor; + private PersistentCache persistentCache; public Builder() { memoryCacheSize(DEFAULT_MEMORY_CACHE_SIZE); @@ -488,7 +503,12 @@ } if (this.blobStore == null) { - this.blobStore = new MongoBlobStore(db, blobCacheSizeMB * 1024 * 1024L); + GarbageCollectableBlobStore s = new MongoBlobStore(db, blobCacheSizeMB * 1024 * 1024L); + PersistentCache p = getPersistentCache(); + if (p != null) { + s = p.wrapBlobStore(s); + } + this.blobStore = s; } if (this.diffCache == null) { @@ -759,9 +779,62 @@ public DocumentMK open() { return new DocumentMK(this); } + + public Cache buildNodeCache(DocumentNodeStore store) { + return buildCache(CacheType.NODE, getNodeCacheSize(), store, null); + } + + public Cache buildChildrenCache() { + return buildCache(CacheType.CHILDREN, getChildrenCacheSize(), null, null); + } + + public Cache buildDocChildrenCache() { + return buildCache(CacheType.DOC_CHILDREN, getDocChildrenCacheSize(), null, null); + } + + public Cache buildDiffCache() { + return buildCache(CacheType.DIFF, getDiffCacheSize(), null, null); + } - public Cache buildCache(long maxWeight) { - if (LIRS_CACHE) { + public Cache buildDocumentCache(DocumentStore docStore) { + return buildCache(CacheType.DOCUMENT, getDocumentCacheSize(), null, docStore); + } + + private Cache buildCache( + CacheType cacheType, + long maxWeight, + DocumentNodeStore docNodeStore, + DocumentStore docStore + ) { + Cache cache = buildCache(maxWeight); + PersistentCache p = getPersistentCache(); + if (p != null) { + if (docNodeStore != null) { + docNodeStore.setPersistentCache(p); + } + cache = p.wrap(docNodeStore, docStore, cache, cacheType); + } + return cache; + } + + private PersistentCache getPersistentCache() { + if (PERSISTENT_CACHE == null) { + return null; + } + if (persistentCache == null) { + try { + persistentCache = new PersistentCache(PERSISTENT_CACHE); + } catch (Throwable e) { + LOG.warn("Persistent cache not available; please disable the configuration", e); + throw new IllegalArgumentException(e); + } + } + return persistentCache; + } + + private Cache buildCache( + long maxWeight) { + if (LIRS_CACHE || PERSISTENT_CACHE != null) { return CacheLIRS.newBuilder(). weigher(weigher). averageWeight(2000). @@ -776,6 +849,7 @@ recordStats(). build(); } + } - + } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeState.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeState.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeState.java (working copy) @@ -18,8 +18,10 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.Set; @@ -59,7 +61,7 @@ /** * A {@link NodeState} implementation for the {@link DocumentNodeStore}. 
*/ -class DocumentNodeState extends AbstractNodeState implements CacheValue { +public class DocumentNodeState extends AbstractNodeState implements CacheValue { public static final Children NO_CHILDREN = new Children(); @@ -106,11 +108,15 @@ return true; } else if (that instanceof DocumentNodeState) { DocumentNodeState other = (DocumentNodeState) that; - if (getPath().equals(other.getPath())) { - if (revisionEquals(other)) { - return true; - } + if (!getPath().equals(other.getPath())) { + // path does not match: not equals + // (even if the properties are equal) + return false; } + if (revisionEquals(other)) { + return true; + } + // revision does not match: might still be equals } else if (that instanceof ModifiedNodeState) { ModifiedNodeState modified = (ModifiedNodeState) that; if (modified.getBaseState() == this) { @@ -119,9 +125,8 @@ } if (that instanceof NodeState) { return AbstractNodeState.equals(this, (NodeState) that); - } else { - return false; } + return false; } @Override @@ -480,6 +485,71 @@ }); } + public String asString() { + JsopWriter json = new JsopBuilder(); + json.key("path").value(path); + json.key("rev").value(rev.toString()); + if (lastRevision != null) { + json.key("lastRev").value(lastRevision.toString()); + } + if (hasChildren) { + json.key("hasChildren").value(hasChildren); + } + if (properties.size() > 0) { + json.key("prop").object(); + for (String k : properties.keySet()) { + json.key(k).value(getPropertyAsString(k)); + } + json.endObject(); + } + return json.toString(); + } + + public static DocumentNodeState fromString(DocumentNodeStore store, String s) { + JsopTokenizer json = new JsopTokenizer(s); + String path = null; + Revision rev = null; + Revision lastRev = null; + boolean hasChildren = false; + DocumentNodeState state = null; + HashMap map = new HashMap(); + while (true) { + String k = json.readString(); + json.read(':'); + if ("path".equals(k)) { + path = json.readString(); + } else if ("rev".equals(k)) { + rev = Revision.fromString(json.readString()); + } else if ("lastRev".equals(k)) { + lastRev = Revision.fromString(json.readString()); + } else if ("hasChildren".equals(k)) { + hasChildren = json.read() == JsopReader.TRUE; + } else if ("prop".equals(k)) { + json.read('{'); + while (true) { + if (json.matches('}')) { + break; + } + k = json.readString(); + json.read(':'); + String v = json.readString(); + map.put(k, v); + json.matches(','); + } + } + if (json.matches(JsopReader.END)) { + break; + } + json.read(','); + } + state = new DocumentNodeState(store, path, rev, hasChildren); + state.setLastRevision(lastRev); + for (Entry e : map.entrySet()) { + state.setProperty(e.getKey(), e.getValue()); + } + return state; + } + /** * A list of children for a node. */ @@ -489,21 +559,71 @@ * Ascending sorted list of names of child nodes. 
*/ final ArrayList children = new ArrayList(); + int cachedMemory; boolean hasMore; @Override public int getMemory() { - int size = 114; - for (String c : children) { - size += c.length() * 2 + 56; + if (cachedMemory == 0) { + int size = 114; + for (String c : children) { + size += c.length() * 2 + 56; + } + cachedMemory = size; } - return size; + return cachedMemory; } @Override public String toString() { return children.toString(); } + + public String asString() { + JsopWriter json = new JsopBuilder(); + if (hasMore) { + json.key("hasMore").value(true); + } + if (children.size() > 0) { + json.key("children").array(); + for (String c : children) { + json.value(c); + } + json.endArray(); + } + return json.toString(); + } + + public static Children fromString(String s) { + JsopTokenizer json = new JsopTokenizer(s); + Children children = new Children(); + while (true) { + if (json.matches(JsopReader.END)) { + break; + } + String k = json.readString(); + json.read(':'); + if ("hasMore".equals(k)) { + children.hasMore = json.read() == JsopReader.TRUE; + } else if ("children".equals(k)) { + json.read('['); + while (true) { + if (json.matches(']')) { + break; + } + String value = json.readString(); + children.children.add(value); + json.matches(','); + } + } + if (json.matches(JsopReader.END)) { + break; + } + json.read(','); + } + return children; + } + } private class ChildNodeEntryIterator implements Iterator { Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStore.java (working copy) @@ -64,13 +64,13 @@ import org.apache.jackrabbit.oak.plugins.blob.MarkSweepGarbageCollector; import org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobReferenceIterator; import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore; +import org.apache.jackrabbit.oak.plugins.document.persistentCache.PersistentCache; import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.commons.json.JsopStream; import org.apache.jackrabbit.oak.commons.json.JsopWriter; import org.apache.jackrabbit.oak.api.Blob; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.cache.CacheStats; -import org.apache.jackrabbit.oak.cache.CacheValue; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.kernel.BlobSerializer; import org.apache.jackrabbit.oak.plugins.document.Branch.BranchCommit; @@ -249,7 +249,7 @@ * * Key: PathRev, value: DocumentNodeState */ - private final Cache nodeCache; + private final Cache nodeCache; private final CacheStats nodeCacheStats; /** @@ -257,7 +257,7 @@ * * Key: PathRev, value: Children */ - private final Cache nodeChildrenCache; + private final Cache nodeChildrenCache; private final CacheStats nodeChildrenCacheStats; /** @@ -265,7 +265,7 @@ * * Key: StringValue, value: Children */ - private final Cache docChildrenCache; + private final Cache docChildrenCache; private final CacheStats docChildrenCacheStats; /** @@ -319,6 +319,8 @@ private final boolean disableBranches; + private PersistentCache persistentCache; + public DocumentNodeStore(DocumentMK.Builder builder) { this.blobStore = builder.getBlobStore(); if (builder.isUseSimpleRevision()) { @@ -361,15 +363,15 @@ //TODO Make stats 
collection configurable as it add slight overhead - nodeCache = builder.buildCache(builder.getNodeCacheSize()); + nodeCache = builder.buildNodeCache(this); nodeCacheStats = new CacheStats(nodeCache, "Document-NodeState", builder.getWeigher(), builder.getNodeCacheSize()); - nodeChildrenCache = builder.buildCache(builder.getChildrenCacheSize()); + nodeChildrenCache = builder.buildChildrenCache(); nodeChildrenCacheStats = new CacheStats(nodeChildrenCache, "Document-NodeChildren", builder.getWeigher(), builder.getChildrenCacheSize()); - docChildrenCache = builder.buildCache(builder.getDocChildrenCacheSize()); + docChildrenCache = builder.buildDocChildrenCache(); docChildrenCacheStats = new CacheStats(docChildrenCache, "Document-DocChildren", builder.getWeigher(), builder.getDocChildrenCacheSize()); @@ -465,6 +467,9 @@ } } } + if (persistentCache != null) { + persistentCache.close(); + } } @Nonnull @@ -686,7 +691,7 @@ return n; } }); - return node == missing ? null : node; + return node == missing || node.equals(missing) ? null : node; } catch (ExecutionException e) { throw new MicroKernelException(e); } @@ -701,32 +706,28 @@ } final String path = checkNotNull(parent).getPath(); final Revision readRevision = parent.getLastRevision(); - PathRev key = childNodeCacheKey(path, readRevision, name); - DocumentNodeState.Children children; - for (;;) { - try { - children = nodeChildrenCache.get(key, new Callable() { - @Override - public DocumentNodeState.Children call() throws Exception { - return readChildren(parent, name, limit); - } - }); - } catch (ExecutionException e) { - throw new MicroKernelException( - "Error occurred while fetching children for path " - + path, e.getCause()); + try { + PathRev key = childNodeCacheKey(path, readRevision, name); + DocumentNodeState.Children children = nodeChildrenCache.get(key, new Callable() { + @Override + public DocumentNodeState.Children call() throws Exception { + return readChildren(parent, name, limit); + } + }); + if (children.children.size() < limit && children.hasMore) { + // not enough children loaded - load more, + // and put that in the cache + // (not using nodeChildrenCache.invalidate, because + // the generational persistent cache doesn't support that) + children = readChildren(parent, name, limit); + nodeChildrenCache.put(key, children); } - if (children.hasMore && limit > children.children.size()) { - // there are potentially more children and - // current cache entry contains less than requested limit - // -> need to refresh entry with current limit - nodeChildrenCache.invalidate(key); - } else { - // use this cache entry - break; - } + return children; + } catch (ExecutionException e) { + throw new MicroKernelException( + "Error occurred while fetching children for path " + + path, e.getCause()); } - return children; } /** @@ -812,7 +813,7 @@ // or more than 16k child docs are requested return store.query(Collection.NODES, from, to, limit); } - CacheValue key = new StringValue(path); + StringValue key = new StringValue(path); // check cache NodeDocument.Children c = docChildrenCache.getIfPresent(key); if (c == null) { @@ -937,13 +938,13 @@ } } if (isNew) { - CacheValue key = childNodeCacheKey(path, rev, null); DocumentNodeState.Children c = new DocumentNodeState.Children(); Set set = Sets.newTreeSet(); for (String p : added) { set.add(Utils.unshareString(PathUtils.getName(p))); } c.children.addAll(set); + PathRev key = childNodeCacheKey(path, rev, null); nodeChildrenCache.put(key, c); } @@ -962,7 +963,7 @@ // update docChildrenCache if 
(!added.isEmpty()) { - CacheValue docChildrenKey = new StringValue(path); + StringValue docChildrenKey = new StringValue(path); NodeDocument.Children docChildren = docChildrenCache.getIfPresent(docChildrenKey); if (docChildren != null) { int currentSize = docChildren.childNames.size(); @@ -1628,7 +1629,11 @@ if (toNode != null) { // exists in both revisions // check if different - if (!fromNode.getLastRevision().equals(toNode.getLastRevision())) { + Revision a = fromNode.getLastRevision(); + Revision b = toNode.getLastRevision(); + if (a == null && b == null) { + // ok + } else if (a == null || b == null || !a.equals(b)) { w.tag('^').key(name).object().endObject().newline(); } } else { @@ -1693,7 +1698,8 @@ private static PathRev childNodeCacheKey(@Nonnull String path, @Nonnull Revision readRevision, @Nullable String name) { - return new PathRev((name == null ? "" : name) + path, readRevision); + String p = (name == null ? "" : name) + path; + return new PathRev(p, readRevision); } private static DocumentRootBuilder asDocumentRootBuilder(NodeBuilder builder) @@ -1878,4 +1884,8 @@ public LastRevRecoveryAgent getLastRevRecoveryAgent() { return lastRevRecoveryAgent; } + + public void setPersistentCache(PersistentCache persistentCache) { + this.persistentCache = persistentCache; + } } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/MemoryDiffCache.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/MemoryDiffCache.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/MemoryDiffCache.java (working copy) @@ -20,7 +20,6 @@ import javax.annotation.Nonnull; import org.apache.jackrabbit.oak.cache.CacheStats; -import org.apache.jackrabbit.oak.cache.CacheValue; import org.apache.jackrabbit.oak.plugins.document.util.StringValue; import com.google.common.cache.Cache; @@ -37,12 +36,12 @@ * * Key: PathRev, value: StringValue */ - protected final Cache diffCache; + protected final Cache diffCache; protected final CacheStats diffCacheStats; MemoryDiffCache(DocumentMK.Builder builder) { - diffCache = builder.buildCache(builder.getDiffCacheSize()); + diffCache = builder.buildDiffCache(); diffCacheStats = new CacheStats(diffCache, "Document-Diff", builder.getWeigher(), builder.getDiffCacheSize()); } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/NodeDocument.java (working copy) @@ -22,6 +22,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.NavigableMap; import java.util.Queue; import java.util.Set; @@ -43,6 +44,10 @@ import com.google.common.collect.Queues; import org.apache.jackrabbit.oak.cache.CacheValue; import org.apache.jackrabbit.oak.commons.PathUtils; +import org.apache.jackrabbit.oak.commons.json.JsopBuilder; +import org.apache.jackrabbit.oak.commons.json.JsopReader; +import org.apache.jackrabbit.oak.commons.json.JsopTokenizer; +import org.apache.jackrabbit.oak.commons.json.JsopWriter; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.slf4j.Logger; @@ -1363,7 +1368,7 @@ return false; } int c1 = 
context.getRevisionComparator().compare(r1, r2); - int c2 = r1.compareRevisionTimeThenClusterId(r2); + int c2 = r1.compareTo(r2); if (c1 == 0) { return c2 == 0; } else if (c1 < 0) { @@ -1661,12 +1666,89 @@ private Map getDeleted() { return ValueMap.create(this, DELETED); } - + + public String asString() { + JsopWriter json = new JsopBuilder(); + toJson(json, data); + return json.toString(); + } + + @SuppressWarnings("unchecked") + private static void toJson(JsopWriter json, Map map) { + for (Entrye : map.entrySet()) { + json.key(e.getKey().toString()); + Object value = e.getValue(); + if (value == null) { + json.value(null); + } else if (value instanceof Boolean) { + json.value((Boolean) value); + } else if (value instanceof Long) { + json.value((Long) value); + } else if (value instanceof Integer) { + json.value((Integer) value); + } else if (value instanceof Map) { + json.object(); + toJson(json, (Map) value); + json.endObject(); + } else if (value instanceof Revision) { + json.value(value.toString()); + } else { + json.value((String) value); + } + } + } + + public static NodeDocument fromString(DocumentStore store, String s) { + JsopTokenizer json = new JsopTokenizer(s); + NodeDocument doc = new NodeDocument(store); + while (true) { + if (json.matches(JsopReader.END)) { + break; + } + String k = json.readString(); + json.read(':'); + if (json.matches(JsopReader.END)) { + break; + } + doc.put(k, fromJson(json)); + json.matches(','); + } + return doc; + } + + private static Object fromJson(JsopTokenizer json) { + switch (json.read()) { + case JsopReader.NULL: + return null; + case JsopReader.TRUE: + return true; + case JsopReader.FALSE: + return false; + case JsopReader.NUMBER: + return Long.parseLong(json.getToken()); + case JsopReader.STRING: + return json.getToken(); + case '{': + TreeMap map = new TreeMap(StableRevisionComparator.REVERSE); + while (true) { + if (json.matches('}')) { + break; + } + String k = json.readString(); + json.read(':'); + map.put(Revision.fromString(k), fromJson(json)); + json.matches(','); + } + return map; + } + throw new IllegalArgumentException(json.readRawValue()); + } + /** * The list of children for a node. The list might be complete or not, in * which case it only represents a block of children. */ - static final class Children implements CacheValue, Cloneable { + public static final class Children implements CacheValue, Cloneable { /** * The child node names, ordered as stored in DocumentStore. 
@@ -1699,6 +1781,52 @@ throw new RuntimeException(); } } + + public String asString() { + JsopWriter json = new JsopBuilder(); + if (isComplete) { + json.key("isComplete").value(true); + } + if (childNames.size() > 0) { + json.key("children").array(); + for (String c : childNames) { + json.value(c); + } + json.endArray(); + } + return json.toString(); + } + + public static Children fromString(String s) { + JsopTokenizer json = new JsopTokenizer(s); + Children children = new Children(); + while (true) { + if (json.matches(JsopReader.END)) { + break; + } + String k = json.readString(); + json.read(':'); + if ("isComplete".equals(k)) { + children.isComplete = json.read() == JsopReader.TRUE; + } else if ("children".equals(k)) { + json.read('['); + while (true) { + if (json.matches(']')) { + break; + } + String value = json.readString(); + children.childNames.add(value); + json.matches(','); + } + } + if (json.matches(JsopReader.END)) { + break; + } + json.read(','); + } + return children; + } + } /** Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/PathRev.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/PathRev.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/PathRev.java (working copy) @@ -69,4 +69,25 @@ public String toString() { return path + "@" + revision; } + + public String asString() { + return toString(); + } + + public static PathRev fromString(String s) { + int index = s.lastIndexOf('@'); + return new PathRev(s.substring(0, index), Revision.fromString(s.substring(index + 1))); + } + + public int compareTo(PathRev b) { + if (this == b) { + return 0; + } + int compare = path.compareTo(b.path); + if (compare == 0) { + compare = revision.compareTo(b.revision); + } + return compare; + } + } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java (working copy) @@ -136,6 +136,22 @@ } return comp; } + + /** + * Compare all components of two revisions. + * + * @param other the other revision + * @return -1, 0, or 1 + */ + int compareTo(Revision other) { + int comp = compareRevisionTimeThenClusterId(other); + if (comp == 0) { + if (branch != other.branch) { + return branch ? -1 : 1; + } + } + return comp; + } /** * Compare the cluster node ids of both revisions. 
@@ -526,17 +542,17 @@ Revision range1 = getRevisionSeen(o1); Revision range2 = getRevisionSeen(o2); if (range1 == FUTURE && range2 == FUTURE) { - return o1.compareRevisionTimeThenClusterId(o2); + return o1.compareTo(o2); } if (range1 == null && range2 == null) { - return o1.compareRevisionTimeThenClusterId(o2); + return o1.compareTo(o2); } if (range1 == null) { return -1; } else if (range2 == null) { return 1; } - int comp = range1.compareRevisionTimeThenClusterId(range2); + int comp = range1.compareTo(range2); if (comp != 0) { return comp; } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/StableRevisionComparator.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/StableRevisionComparator.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/StableRevisionComparator.java (working copy) @@ -42,6 +42,6 @@ @Override public int compare(Revision o1, Revision o2) { - return o1.compareRevisionTimeThenClusterId(o2); + return o1.compareTo(o2); } } Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/mongo/MongoDocumentStore.java (working copy) @@ -195,7 +195,7 @@ if (builder.useOffHeapCache()) { nodesCache = createOffHeapCache(builder); } else { - nodesCache = builder.buildCache(builder.getDocumentCacheSize()); + nodesCache = builder.buildDocumentCache(this); } cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/BlobCache.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/BlobCache.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/BlobCache.java (working copy) @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import javax.annotation.CheckForNull; +import javax.annotation.Nonnull; + +import org.apache.jackrabbit.oak.plugins.document.persistentCache.PersistentCache.GenerationCache; +import org.apache.jackrabbit.oak.spi.blob.BlobStore; +import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; +import org.h2.mvstore.MVMapConcurrent; +import org.h2.mvstore.StreamStore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A persistent blob cache. Only blobs that are smaller than 10% of the maximum + * cache size are stored. + */ +public class BlobCache implements BlobStore, GarbageCollectableBlobStore, GenerationCache { + + static final Logger LOG = LoggerFactory.getLogger(BlobCache.class); + + private final GarbageCollectableBlobStore base; + private final PersistentCache cache; + private final MultiGenerationMap meta; + private MultiGenerationMap data; + private StreamStore streamStore; + private long maxEntrySize; + + public BlobCache( + PersistentCache cache, + GarbageCollectableBlobStore base) { + this.cache = cache; + this.base = base; + data = new MultiGenerationMap(); + meta = new MultiGenerationMap(); + maxEntrySize = cache.getMaxBinaryEntrySize(); + } + + @Override + public void addGeneration(int generation, boolean readOnly) { + Map d = cache.openMap(generation, "data", + new MVMapConcurrent.Builder()); + data.addReadMap(generation, d); + Map m = cache.openMap(generation, "meta", + new MVMapConcurrent.Builder()); + meta.addReadMap(generation, m); + if (!readOnly) { + // the order is important: + // if we switch the data first, + // we could end up with the data in store 1 + // but the metadata in store 2 - which could + // result in a data block not found if store 1 + // is removed later on + meta.setWriteMap(m); + data.setWriteMap(d); + } + if (streamStore == null) { + streamStore = new StreamStore(data); + } + } + + @Override + public void removeGeneration(int generation) { + data.removeReadMap(generation); + meta.removeReadMap(generation); + } + + @Override + public InputStream getInputStream(String blobId) throws IOException { + if (streamStore == null) { + return base.getInputStream(blobId); + } + cache.switchGenerationIfNeeded(); + byte[] id = meta.get(blobId); + if (id == null) { + long length = base.getBlobLength(blobId); + InputStream in = base.getInputStream(blobId); + if (length < base.getBlockSizeMin()) { + // in-place + return in; + } + if (length > maxEntrySize) { + // too large, don't cache + return in; + } + id = streamStore.put(in); + in.close(); + meta.put(blobId, id); + } + return streamStore.get(id); + } + + @Override + public String writeBlob(InputStream in) throws IOException { + // TODO maybe copy the binary to the cache in a background thread + return base.writeBlob(in); + } + + @Override + public int readBlob(String blobId, long pos, byte[] buff, int off, + int length) throws IOException { + InputStream in = getInputStream(blobId); + long remainingSkip = pos; + while (remainingSkip > 0) { + remainingSkip -= in.skip(remainingSkip); + } + return in.read(buff, off, length); + } + + @Override + public long getBlobLength(String blobId) throws IOException { + return base.getBlobLength(blobId); + } + + @Override + @CheckForNull + public String getBlobId(@Nonnull String reference) { + return base.getBlobId(reference); + } + + 
@Override + @CheckForNull + public String getReference(@Nonnull String blobId) { + return base.getReference(blobId); + } + + @Override + public void clearCache() { + base.clearCache(); + } + + @Override + public void clearInUse() { + base.clearInUse(); + } + + @Override + public boolean deleteChunks(List arg0, long arg1) throws Exception { + return base.deleteChunks(arg0, arg1); + } + + @Override + public Iterator getAllChunkIds(long arg0) throws Exception { + return base.getAllChunkIds(arg0); + } + + @Override + public long getBlockSizeMin() { + return base.getBlockSizeMin(); + } + + @Override + public Iterator resolveChunks(String arg0) throws IOException { + return base.resolveChunks(arg0); + } + + @Override + public void setBlockSize(int arg0) { + base.setBlockSize(arg0); + } + + @Override + public void startMark() throws IOException { + base.startMark(); + } + + @Override + public int sweep() throws IOException { + return base.sweep(); + } + + @Override + public String writeBlob(String arg0) throws IOException { + return base.writeBlob(arg0); + } + +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/CacheType.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/CacheType.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/CacheType.java (working copy) @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeState; +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentStore; +import org.apache.jackrabbit.oak.plugins.document.NodeDocument; +import org.apache.jackrabbit.oak.plugins.document.PathRev; +import org.apache.jackrabbit.oak.plugins.document.util.StringValue; + +public enum CacheType { + + NODE { + @Override + public String keyToString(K key) { + return ((PathRev) key).asString(); + } + @SuppressWarnings("unchecked") + @Override + public K keyFromString(String key) { + return (K) PathRev.fromString(key); + } + @Override + public int compareKeys(K a, K b) { + return ((PathRev) a).compareTo((PathRev) b); + } + @Override + public String valueToString(V value) { + return ((DocumentNodeState) value).asString(); + } + @SuppressWarnings("unchecked") + @Override + public V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value) { + return (V) DocumentNodeState.fromString(store, value); + } + }, + + CHILDREN { + @Override + public String keyToString(K key) { + return ((PathRev) key).asString(); + } + @SuppressWarnings("unchecked") + @Override + public K keyFromString(String key) { + return (K) PathRev.fromString(key); + } + @Override + public int compareKeys(K a, K b) { + return ((PathRev) a).compareTo((PathRev) b); + } + @Override + public String valueToString(V value) { + return ((DocumentNodeState.Children) value).asString(); + } + @SuppressWarnings("unchecked") + @Override + public V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value) { + return (V) DocumentNodeState.Children.fromString(value); + } + }, + + DIFF { + @Override + public String keyToString(K key) { + return ((PathRev) key).asString(); + } + @SuppressWarnings("unchecked") + @Override + public K keyFromString(String key) { + return (K) PathRev.fromString(key); + } + @Override + public int compareKeys(K a, K b) { + return ((PathRev) a).compareTo((PathRev) b); + } + @Override + public String valueToString(V value) { + return ((StringValue) value).asString(); + } + @SuppressWarnings("unchecked") + @Override + public V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value) { + return (V) StringValue.fromString(value); + } + }, + + DOC_CHILDREN { + @Override + public String keyToString(K key) { + return ((StringValue) key).asString(); + } + @SuppressWarnings("unchecked") + @Override + public K keyFromString(String key) { + return (K) StringValue.fromString(key); + } + @Override + public int compareKeys(K a, K b) { + return ((StringValue) a).asString().compareTo(((StringValue) b).asString()); + } + @Override + public String valueToString(V value) { + return ((NodeDocument.Children) value).asString(); + } + @SuppressWarnings("unchecked") + @Override + public V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value) { + return (V) NodeDocument.Children.fromString(value); + } + }, + + DOCUMENT { + @Override + public String keyToString(K key) { + return ((StringValue) key).asString(); + } + @SuppressWarnings("unchecked") + @Override + public K keyFromString(String key) { + return (K) StringValue.fromString(key); + } + @Override + public int compareKeys(K a, K b) { + return ((StringValue) a).asString().compareTo(((StringValue) b).asString()); + } + @Override + public String valueToString(V value) { + return ((NodeDocument) 
value).asString(); + } + @SuppressWarnings("unchecked") + @Override + public V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value) { + return (V) NodeDocument.fromString(docStore, value); + } + }; + + public abstract String keyToString(K key); + public abstract K keyFromString(String key); + public abstract int compareKeys(K a, K b); + public abstract String valueToString(V value); + public abstract V valueFromString( + DocumentNodeStore store, DocumentStore docStore, String value); + +} + Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/KeyDataType.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/KeyDataType.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/KeyDataType.java (working copy) @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.nio.ByteBuffer; + +import org.apache.jackrabbit.oak.cache.CacheValue; +import org.h2.mvstore.WriteBuffer; +import org.h2.mvstore.type.DataType; +import org.h2.mvstore.type.StringDataType; + +public class KeyDataType implements DataType { + + private final CacheType type; + + public KeyDataType(CacheType type) { + this.type = type; + } + + @Override + public int compare(Object a, Object b) { + return type.compareKeys(a, b); + } + + @Override + public int getMemory(Object obj) { + return ((CacheValue) obj).getMemory(); + } + + @Override + public void write(WriteBuffer buff, Object obj) { + String s = type.keyToString(obj); + StringDataType.INSTANCE.write(buff, s); + } + + @Override + public Object read(ByteBuffer buff) { + String s = StringDataType.INSTANCE.read(buff); + return type.keyFromString(s); + } + + @Override + public void write(WriteBuffer buff, Object[] obj, int len, boolean key) { + for (int i = 0; i < len; i++) { + write(buff, obj[i]); + } + } + + @Override + public void read(ByteBuffer buff, Object[] obj, int len, boolean key) { + for (int i = 0; i < len; i++) { + obj[i] = read(buff); + } + } + +} \ No newline at end of file Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/MultiGenerationMap.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/MultiGenerationMap.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/MultiGenerationMap.java (working copy) @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentSkipListMap; + +public class MultiGenerationMap implements Map { + + private volatile Map write; + private ConcurrentSkipListMap> read = + new ConcurrentSkipListMap>(); + + MultiGenerationMap() { + } + + public void setWriteMap(Map m) { + write = m; + } + + public void addReadMap(int generation, Map m) { + read.put(generation, m); + } + + public void removeReadMap(int generation) { + read.remove(generation); + } + + @Override + public V put(K key, V value) { + return write.put(key, value); + } + + @SuppressWarnings("unchecked") + @Override + public V get(Object key) { + for (int generation : read.descendingKeySet()) { + Map m = read.get(generation); + if (m != null) { + V value = m.get(key); + if (value != null) { + if (m != write) { + put((K) key, value); + } + return value; + } + } + } + return null; + } + + @Override + public boolean containsKey(Object key) { + for (int generation : read.descendingKeySet()) { + Map m = read.get(generation); + if (m != null) { + if (m.containsKey(key)) { + return true; + } + } + } + return false; + } + + @Override + public V remove(Object key) { + return write.remove(key); + } + + @Override + public void clear() { + write.clear(); + } + + @Override + public int size() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isEmpty() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean containsValue(Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map m) { + throw new UnsupportedOperationException(); + } + + @Override + public Set keySet() { + throw new UnsupportedOperationException(); + } + + @Override + public Collection values() { + throw new UnsupportedOperationException(); + } + + @Override + public Set> entrySet() { + throw new UnsupportedOperationException(); + } + +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/NodeCache.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/NodeCache.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/NodeCache.java (working copy) @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ExecutionException; + +import javax.annotation.Nullable; + +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentStore; +import org.apache.jackrabbit.oak.plugins.document.persistentCache.PersistentCache.GenerationCache; +import org.h2.mvstore.MVMapConcurrent; +import org.h2.mvstore.type.DataType; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheStats; +import com.google.common.collect.ImmutableMap; + +class NodeCache implements Cache, GenerationCache { + + private final PersistentCache cache; + private final Cache memCache; + private final MultiGenerationMap map; + private final CacheType type; + private final DocumentNodeStore docNodeStore; + private final DocumentStore docStore; + + NodeCache( + PersistentCache cache, + Cache memCache, + DocumentNodeStore docNodeStore, + DocumentStore docStore, CacheType type) { + this.cache = cache; + this.memCache = memCache; + this.type = type; + this.docNodeStore = docNodeStore; + this.docStore = docStore; + PersistentCache.LOG.info("wrap " + this.type); + map = new MultiGenerationMap(); + } + + @Override + public void addGeneration(int generation, boolean readOnly) { + DataType keyType = new KeyDataType(type); + DataType valueType = new ValueDataType(docNodeStore, docStore, type); + MVMapConcurrent.Builder b = new MVMapConcurrent.Builder(). 
+ keyType(keyType).valueType(valueType); + String mapName = type.name(); + Map m = cache.openMap(generation, mapName, b); + map.addReadMap(generation, m); + if (!readOnly) { + map.setWriteMap(m); + } + } + + @Override + public void removeGeneration(int generation) { + map.removeReadMap(generation); + } + + private V readIfPresent(K key) { + cache.switchGenerationIfNeeded(); + V v = map.get(key); + return v; + } + + public void write(K key, V value) { + cache.switchGenerationIfNeeded(); + if (value == null) { + map.remove(key); + } else { + map.put(key, value); + } + } + + @SuppressWarnings("unchecked") + @Override + @Nullable + public V getIfPresent(Object key) { + V value = memCache.getIfPresent(key); + if (value != null) { + return value; + } + value = readIfPresent((K) key); + if (value != null) { + memCache.put((K) key, value); + } + return value; + } + + @Override + public V get(K key, + Callable valueLoader) + throws ExecutionException { + V value = getIfPresent(key); + if (value != null) { + return value; + } + value = memCache.get(key, valueLoader); + write(key, value); + return value; + } + + @Override + public ImmutableMap getAllPresent( + Iterable keys) { + return memCache.getAllPresent(keys); + } + + @Override + public void put(K key, V value) { + memCache.put(key, value); + write(key, value); + } + + @SuppressWarnings("unchecked") + @Override + public void invalidate(Object key) { + memCache.invalidate(key); + ; + // TODO: this is currently needed for children; + // probably but shouldn't + write((K) key, (V) null); + } + + @Override + public void putAll(Map m) { + memCache.putAll(m); + } + + @Override + public void invalidateAll(Iterable keys) { + memCache.invalidateAll(keys); + } + + @Override + public void invalidateAll() { + memCache.invalidateAll(); + map.clear(); + } + + @Override + public long size() { + return memCache.size(); + } + + @Override + public CacheStats stats() { + return memCache.stats(); + } + + @Override + public ConcurrentMap asMap() { + return memCache.asMap(); + } + + @Override + public void cleanUp() { + memCache.cleanUp(); + } + +} \ No newline at end of file Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/PersistentCache.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/PersistentCache.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/PersistentCache.java (working copy) @@ -0,0 +1,312 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.io.File; +import java.util.ArrayList; +import java.util.Map; +import java.util.TreeSet; + +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentStore; +import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; +import org.h2.mvstore.FileStore; +import org.h2.mvstore.MVMapConcurrent; +import org.h2.mvstore.MVStore; +import org.h2.mvstore.MVStoreTool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.cache.Cache; + +/** + * A persistent cache for the document store. + */ +public class PersistentCache { + + static final Logger LOG = LoggerFactory.getLogger(PersistentCache.class); + + private static final String FILE_PREFIX = "cache-"; + private static final String FILE_SUFFIX = ".data"; + + private boolean cacheNodes = true; + private boolean cacheChildren = true; + private boolean cacheDiff = true; + private boolean cacheDocs; + private boolean cacheDocChildren; + private boolean compactOnClose = true; + private boolean compress = true; + private ArrayList caches = + new ArrayList(); + + private final String directory; + private MVStore writeStore; + private MVStore readStore; + private int maxSizeMB = 1024; + private int readGeneration = -1; + private int writeGeneration = 0; + private long maxBinaryEntry = 1024 * 1024; + + public PersistentCache(String url) { + LOG.info("start version 1"); + String[] parts = url.split(","); + String dir = parts[0]; + for (String p : parts) { + if (p.equals("+docs")) { + cacheDocs = true; + } else if (p.equals("+docChildren")) { + cacheDocChildren = true; + } else if (p.equals("-nodes")) { + cacheNodes = false; + } else if (p.equals("-children")) { + cacheChildren = false; + } else if (p.equals("-diff")) { + cacheDiff = false; + } else if (p.equals("+all")) { + cacheDocs = true; + cacheDocChildren = true; + } else if (p.equals("-compact")) { + compactOnClose = false; + } else if (p.equals("-compress")) { + compress = false; + } else if (p.endsWith("time")) { + dir += "-" + System.currentTimeMillis(); + } else if (p.startsWith("size=")) { + maxSizeMB = Integer.parseInt(p.split("=")[1]); + } else if (p.startsWith("binary=")) { + maxBinaryEntry = Long.parseLong(p.split("=")[1]); + } + } + this.directory = dir; + if (dir.length() == 0) { + readGeneration = -1; + writeGeneration = 0; + writeStore = openStore(writeGeneration, false); + return; + } + File dr = new File(dir); + if (!dr.exists()) { + dr.mkdirs(); + } + if (dr.exists() && !dr.isDirectory()) { + throw new IllegalArgumentException("A file exists at cache directory " + dir); + } + File[] list = dr.listFiles(); + TreeSet generations = new TreeSet(); + if (list != null) { + for(File f : list) { + String fn = f.getName(); + if (fn.startsWith(FILE_PREFIX) && fn.endsWith(FILE_SUFFIX)) { + String g = fn.substring(FILE_PREFIX.length(), fn.indexOf(FILE_SUFFIX)); + try { + int gen = Integer.parseInt(g); + if (gen >= 0) { + File f2 = new File(getFileName(gen)); + if (fn.equals(f2.getName())) { + // ignore things like "cache-000.data" + generations.add(gen); + } + } + } catch (Exception e) { + // ignore this file + } + } + } + } + while (generations.size() > 2) { + generations.remove(generations.last()); + } + readGeneration = generations.size() > 1 ? generations.first() : -1; + writeGeneration = generations.size() > 0 ? 
generations.last() : 0; + if (readGeneration >= 0) { + readStore = openStore(readGeneration, true); + } + writeStore = openStore(writeGeneration, false); + } + + private String getFileName(int generation) { + if (directory.length() == 0) { + return null; + } + return directory + "/" + FILE_PREFIX + generation + FILE_SUFFIX; + } + + private MVStore openStore(int generation, boolean readOnly) { + String fileName = getFileName(generation); + MVStore.Builder builder = new MVStore.Builder(); + if (compress) { + builder.compress(); + } + if (fileName != null) { + builder.fileName(fileName); + } + if (readOnly) { + builder.readOnly(); + } + if (maxSizeMB < 10) { + builder.cacheSize(maxSizeMB); + } + builder.backgroundExceptionHandler(new Thread.UncaughtExceptionHandler() { + @Override + public void uncaughtException(Thread t, Throwable e) { + LOG.error("Error in persistent cache", e); + } + }); + return builder.open(); + } + + public void close() { + closeStore(writeStore, writeGeneration); + closeStore(readStore, readGeneration); + } + + private void closeStore(MVStore s, int generation) { + if (s == null) { + return; + } + String fileName = getFileName(generation); + boolean compact = compactOnClose; + if (s.getFileStore().isReadOnly()) { + compact = false; + } + s.close(); + if (compact) { + MVStoreTool.compact(fileName, true); + } + } + + public synchronized GarbageCollectableBlobStore wrapBlobStore( + GarbageCollectableBlobStore base) { + BlobCache c = new BlobCache(this, base); + initGenerationCache(c); + return c; + } + + public synchronized Cache wrap( + DocumentNodeStore docNodeStore, + DocumentStore docStore, + Cache base, CacheType type) { + boolean wrap; + switch (type) { + case NODE: + wrap = cacheNodes; + break; + case CHILDREN: + wrap = cacheChildren; + break; + case DIFF: + wrap = cacheDiff; + break; + case DOC_CHILDREN: + wrap = cacheDocChildren; + break; + case DOCUMENT: + wrap = cacheDocs; + break; + default: + wrap = false; + break; + } + if (wrap) { + NodeCache c = new NodeCache(this, base, docNodeStore, docStore, type); + initGenerationCache(c); + return c; + } + return base; + } + + private void initGenerationCache(GenerationCache c) { + caches.add(c); + if (readGeneration >= 0) { + c.addGeneration(readGeneration, true); + } + c.addGeneration(writeGeneration, false); + } + + synchronized Map openMap(int generation, String name, + MVMapConcurrent.Builder builder) { + MVStore s; + if (generation == readGeneration) { + s = readStore; + } else if (generation == writeGeneration) { + s = writeStore; + } else { + throw new IllegalArgumentException("Unknown generation: " + generation); + } + return s.openMap(name, builder); + } + + public void switchGenerationIfNeeded() { + if (!needSwitch()) { + return; + } + synchronized (this) { + // maybe another thread already switched, + // so we need to check again + if (!needSwitch()) { + return; + } + int oldReadGeneration = readGeneration; + MVStore oldRead = readStore; + readStore = writeStore; + readGeneration = writeGeneration; + MVStore w = openStore(writeGeneration + 1, false); + writeStore = w; + writeGeneration++; + for (GenerationCache c : caches) { + c.addGeneration(writeGeneration, false); + if (oldReadGeneration >= 0) { + c.removeGeneration(oldReadGeneration); + } + } + if (oldRead != null) { + oldRead.close(); + new File(getFileName(oldReadGeneration)).delete(); + } + } + } + + private boolean needSwitch() { + FileStore fs = writeStore.getFileStore(); + if (fs == null) { + return false; + } + long size = fs.size(); + if (size 
/ 1024 / 1024 <= maxSizeMB) { + return false; + } + return true; + } + + public int getMaxSize() { + return maxSizeMB; + } + + public long getMaxBinaryEntrySize() { + return maxBinaryEntry; + } + + static interface GenerationCache { + + void addGeneration(int writeGeneration, boolean b); + + void removeGeneration(int oldReadGeneration); + + + } + +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/ValueDataType.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/ValueDataType.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/ValueDataType.java (working copy) @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.persistentCache; + +import java.nio.ByteBuffer; + +import org.apache.jackrabbit.oak.cache.CacheValue; +import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentStore; +import org.h2.mvstore.WriteBuffer; +import org.h2.mvstore.type.DataType; +import org.h2.mvstore.type.StringDataType; + +public class ValueDataType implements DataType { + + private final DocumentNodeStore docNodeStore; + private final DocumentStore docStore; + private final CacheType type; + + public ValueDataType( + DocumentNodeStore docNodeStore, + DocumentStore docStore, CacheType type) { + this.docNodeStore = docNodeStore; + this.docStore = docStore; + this.type = type; + } + + @Override + public int compare(Object a, Object b) { + return 0; + } + + @Override + public int getMemory(Object obj) { + return ((CacheValue) obj).getMemory(); + } + + @Override + public void write(WriteBuffer buff, Object obj) { + String s = type.valueToString(obj); + StringDataType.INSTANCE.write(buff, s); + } + + @Override + public Object read(ByteBuffer buff) { + String s = StringDataType.INSTANCE.read(buff); + return type.valueFromString(docNodeStore, docStore, s); + } + + @Override + public void write(WriteBuffer buff, Object[] obj, int len, boolean key) { + for (int i = 0; i < len; i++) { + write(buff, obj[i]); + } + } + + @Override + public void read(ByteBuffer buff, Object[] obj, int len, boolean key) { + for (int i = 0; i < len; i++) { + obj[i] = read(buff); + } + } + +} \ No newline at end of file Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java (revision 1633090) +++ 
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java (working copy) @@ -242,7 +242,7 @@ this.ds = ds; this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBDocumentStore creation") : null; - this.nodesCache = builder.buildCache(builder.getDocumentCacheSize()); + this.nodesCache = builder.buildDocumentCache(this); this.cacheStats = new CacheStats(nodesCache, "Document-Documents", builder.getWeigher(), builder.getDocumentCacheSize()); Connection con = ds.getConnection(); Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/StringValue.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/StringValue.java (revision 1633090) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/StringValue.java (working copy) @@ -61,4 +61,13 @@ public String toString() { return value; } + + public String asString() { + return value; + } + + public static StringValue fromString(String value) { + return new StringValue(value); + } + } Index: oak-jcr/pom.xml =================================================================== --- oak-jcr/pom.xml (revision 1633090) +++ oak-jcr/pom.xml (working copy) @@ -296,7 +296,7 @@ com.h2database h2 - 1.3.175 + 1.4.182 test Index: oak-mk/pom.xml =================================================================== --- oak-mk/pom.xml (revision 1633090) +++ oak-mk/pom.xml (working copy) @@ -114,7 +114,7 @@ com.h2database h2 - 1.3.175 + 1.4.182 true Index: oak-run/pom.xml =================================================================== --- oak-run/pom.xml (revision 1633090) +++ oak-run/pom.xml (working copy) @@ -147,7 +147,7 @@ com.h2database h2 - 1.3.175 + 1.4.182 org.mongodb Index: oak-upgrade/pom.xml =================================================================== --- oak-upgrade/pom.xml (revision 1633090) +++ oak-upgrade/pom.xml (working copy) @@ -96,7 +96,7 @@ com.h2database h2 - 1.3.175 + 1.4.182 test
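
Usage note (not part of the patch): the persistent cache introduced above is only activated when the "oak.documentMK.persCache" system property is set. Its value is parsed by PersistentCache as "<directory>[,option,...]", with options such as +docs, +docChildren, +all, -nodes, -children, -diff, -compact, -compress, size=<MB> and binary=<bytes>. The minimal sketch below shows how a caller might enable it; the chosen option values and the commented-out Mongo connection are illustrative assumptions, not defined by the patch.

    // Minimal sketch (illustrative values; requires the patch above).
    // The property must be set before the DocumentMK class is loaded,
    // because PERSISTENT_CACHE is read when the class is initialized.
    System.setProperty("oak.documentMK.persCache",
            "cache,size=1024,binary=1048576,+docs");

    DocumentMK mk = new DocumentMK.Builder()
            // .setMongoDB(db)   // hypothetical: supply a real MongoDB connection here
            .open();
    try {
        // read and write through the MicroKernel / DocumentNodeStore as usual;
        // node, children and diff entries (and, with +docs, documents) are now
        // also persisted to cache-<generation>.data files in the "cache" directory
    } finally {
        mk.dispose();
    }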