Index: pom.xml
===================================================================
--- pom.xml	(revision 1850821)
+++ pom.xml	(working copy)
@@ -58,6 +58,12 @@
     <dependency>
       <groupId>org.apache.jackrabbit</groupId>
+      <artifactId>oak-lucene</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.jackrabbit</groupId>
       <artifactId>oak-segment-tar</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
@@ -236,5 +242,58 @@
       <version>1.1.1</version>
       <scope>test</scope>
     </dependency>
+
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-core</artifactId>
+      <version>${lucene.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-analyzers-common</artifactId>
+      <version>${lucene.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-queryparser</artifactId>
+      <version>${lucene.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-queries</artifactId>
+      <version>${lucene.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-suggest</artifactId>
+      <version>${lucene.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-highlighter</artifactId>
+      <version>${lucene.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-memory</artifactId>
+      <version>${lucene.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-misc</artifactId>
+      <version>${lucene.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-facet</artifactId>
+      <version>${lucene.version}</version>
+    </dependency>
+
   </dependencies>
Index: src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTest.java
===================================================================
--- src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTest.java	(nonexistent)
+++ src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTest.java	(working copy)
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.composite;
+
+import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
+import static org.apache.jackrabbit.oak.api.Type.NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NODE_TYPE;
+import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
+import static org.apache.jackrabbit.oak.plugins.index.IndexUtils.createIndexDefinition;
+import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+import javax.jcr.query.Query;
+
+import org.apache.jackrabbit.oak.api.Type;
+import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
+import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.reference.NodeReferenceConstants;
+import org.apache.jackrabbit.oak.plugins.index.reference.ReferenceEditorProvider;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EditorHook;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import com.google.common.collect.ImmutableSet;
+
+@RunWith(Parameterized.class)
+public class CompositeNodeStoreQueryTest extends CompositeNodeStoreQueryTestBase {
+
+    public CompositeNodeStoreQueryTest(NodeStoreKind root, NodeStoreKind mounts) {
+        super(root, mounts);
+    }
+
+    @Test
+    public void propertyIndex() throws Exception {
+
+        // create an index in both the read-only and the read-write store
+        NodeBuilder b;
+        NodeBuilder readOnlyBuilder = readOnlyStore.getRoot().builder();
+        b = createIndexDefinition(readOnlyBuilder.child(INDEX_DEFINITIONS_NAME), "foo",
+                true, false, ImmutableSet.of("foo"), null);
+        b.setProperty("excludedPaths", "/jcr:system");
+        NodeBuilder globalBuilder = globalStore.getRoot().builder();
+        b = createIndexDefinition(globalBuilder.child(INDEX_DEFINITIONS_NAME), "foo",
+                true, false, ImmutableSet.of("foo"), null);
+        b.setProperty("excludedPaths", "/jcr:system");
+        EditorHook hook = new EditorHook(
+                new IndexUpdateProvider(new PropertyIndexEditorProvider().with(mip)));
+        readOnlyStore.merge(readOnlyBuilder, hook, CommitInfo.EMPTY);
+        globalStore.merge(globalBuilder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        // add nodes in the read-only area
+        NodeBuilder builder;
+        builder = readOnlyStore.getRoot().builder();
+        for (int i = 0; i < 3; i++) {
+            builder.child("readOnly").child("node-" + i).setProperty("foo", "bar");
+        }
+        readOnlyStore.merge(builder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        // run a query
+        session = createRepository(store).login(null, null);
+        root = session.getLatestRoot();
+        qe = root.getQueryEngine();
+        assertThat(executeQuery("explain /jcr:root//*[@foo = 'bar']", "xpath", false).toString(),
+                containsString("/* property foo = bar"));
+        assertEquals("[/readOnly/node-0, /readOnly/node-1, /readOnly/node-2]",
+                executeQuery("/jcr:root//*[@foo = 'bar']", "xpath").toString());
+
+        // add nodes in the read-write area
+        builder = store.getRoot().builder();
+        for (int i = 0; i < 3; i++) {
+            builder.child("content").child("node-" + i).setProperty("foo", "bar");
+        }
+        store.merge(builder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        // run a query
+        assertEquals("[/content/node-0, /content/node-1, /content/node-2, " +
+                "/readOnly/node-0, /readOnly/node-1, /readOnly/node-2]",
+                executeQuery("/jcr:root//*[@foo = 'bar']", "xpath").toString());
+        assertThat(executeQuery("explain /jcr:root/content//*[@foo = 'bar']", "xpath", false).toString(),
+                containsString("/* property foo = bar"));
+
+        // remove all data
+        builder = store.getRoot().builder();
+        builder.child("content").remove();
+        store.merge(builder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        // run a query
+        assertEquals("[]",
+                executeQuery("/jcr:root/content//*[@foo = 'bar']", "xpath").toString());
+    }
+
+    @Test
+    public void referenceIndex() throws Exception {
+        // create an index in both the read-only and the read-write store
+        NodeBuilder b;
+        NodeBuilder readOnlyBuilder = readOnlyStore.getRoot().builder();
+        b = readOnlyBuilder.child(INDEX_DEFINITIONS_NAME).child("reference");
+        b.setProperty(JCR_PRIMARYTYPE, INDEX_DEFINITIONS_NODE_TYPE, NAME);
+        b.setProperty(TYPE_PROPERTY_NAME, NodeReferenceConstants.TYPE);
+
+        NodeBuilder globalBuilder = globalStore.getRoot().builder();
+        b = globalBuilder.child(INDEX_DEFINITIONS_NAME).child("reference");
+        b.setProperty(JCR_PRIMARYTYPE, INDEX_DEFINITIONS_NODE_TYPE, NAME);
+        b.setProperty(TYPE_PROPERTY_NAME, NodeReferenceConstants.TYPE);
+
+        EditorHook hook = new EditorHook(
+                new IndexUpdateProvider(new ReferenceEditorProvider().with(mip)));
+        readOnlyStore.merge(readOnlyBuilder, hook, CommitInfo.EMPTY);
+        globalStore.merge(globalBuilder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        NodeBuilder builder;
+        builder = readOnlyStore.getRoot().builder();
+        for (int i = 0; i < 3; i++) {
+            builder.child("readOnly").child("node-" + i).setProperty(createProperty("foo", "u1", Type.REFERENCE));
+        }
+        readOnlyStore.merge(builder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        builder = store.getRoot().builder();
+        builder.child("a").child("x").setProperty(createProperty("foo", "u1", Type.REFERENCE));
+        store.merge(builder, hook, CommitInfo.EMPTY);
+        root.commit();
+
+        // run a query
+        session = createRepository(store).login(null, null);
+        root = session.getLatestRoot();
+        qe = root.getQueryEngine();
+        assertThat(executeQuery("explain select * from [nt:base] " +
+                "where property([*], 'Reference') = cast('u1' as reference)", Query.JCR_SQL2, false).toString(),
+                containsString("/* reference "));
+        // expected: also /readOnly/node-0 .. 2
+        assertEquals("[/a/x, /readOnly/node-0, /readOnly/node-1, /readOnly/node-2]",
+                executeQuery("select [jcr:path] from [nt:base] " +
+                        "where property([*], 'Reference') = cast('u1' as reference)", Query.JCR_SQL2).toString());
+
+    }
+
+}
Index: src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTestBase.java
===================================================================
--- src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTestBase.java	(nonexistent)
+++ src/test/java/org/apache/jackrabbit/oak/composite/CompositeNodeStoreQueryTestBase.java	(working copy)
@@ -0,0 +1,463 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.composite;
+
+import static com.google.common.collect.Lists.newArrayList;
+import static org.apache.jackrabbit.oak.api.QueryEngine.NO_BINDINGS;
+import static org.apache.jackrabbit.oak.api.QueryEngine.NO_MAPPINGS;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.Closeable;
+import java.io.File;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import javax.sql.DataSource;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.InitialContent;
+import org.apache.jackrabbit.oak.Oak;
+import org.apache.jackrabbit.oak.api.ContentRepository;
+import org.apache.jackrabbit.oak.api.ContentSession;
+import org.apache.jackrabbit.oak.api.PropertyValue;
+import org.apache.jackrabbit.oak.api.QueryEngine;
+import org.apache.jackrabbit.oak.api.Result;
+import org.apache.jackrabbit.oak.api.ResultRow;
+import org.apache.jackrabbit.oak.api.Root;
+import org.apache.jackrabbit.oak.api.Type;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
+import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDataSourceFactory;
+import org.apache.jackrabbit.oak.plugins.document.rdb.RDBOptions;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.nodetype.NodeTypeIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexProvider;
+import org.apache.jackrabbit.oak.plugins.index.reference.ReferenceEditorProvider;
+import org.apache.jackrabbit.oak.plugins.index.reference.ReferenceIndexProvider;
+import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
+import org.apache.jackrabbit.oak.segment.SegmentNodeStore;
+import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders;
+import org.apache.jackrabbit.oak.segment.file.FileStore;
+import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.spi.blob.FileBlobStore;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.commit.Observer;
+import org.apache.jackrabbit.oak.spi.mount.MountInfoProvider;
+import org.apache.jackrabbit.oak.spi.mount.Mounts;
+import org.apache.jackrabbit.oak.spi.query.QueryConstants;
+import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
+import org.apache.jackrabbit.oak.spi.security.OpenSecurityProvider;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.jetbrains.annotations.Nullable;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.google.common.collect.Lists;
+
+@RunWith(Parameterized.class)
+public class CompositeNodeStoreQueryTestBase {
+
+    private final NodeStoreKind nodeStoreRoot;
+    private final NodeStoreKind mounts;
+
+    private final List<NodeStoreRegistration> registrations = newArrayList();
+
+    private NodeStore mountedStore;
+    private NodeStore deepMountedStore;
+
+    protected NodeStore readOnlyStore;
+
+    // the composite store (containing read-write and read-only stores)
+    protected CompositeNodeStore store;
+
+    // the global store (read-write)
+    protected NodeStore globalStore;
+
+    protected NodeStore emptyStore;
+
+    protected MountInfoProvider mip;
+
+    protected QueryEngine qe;
+    protected ContentSession session;
+    protected Root root;
+
+    @Parameters(name="Root: {0}, Mounts: {1}")
+    public static Collection<Object[]> data() {
+        return Arrays.asList(new Object[][] {
+                { NodeStoreKind.MEMORY, NodeStoreKind.MEMORY },
+//                { NodeStoreKind.SEGMENT, NodeStoreKind.SEGMENT},
+//                { NodeStoreKind.DOCUMENT_H2, NodeStoreKind.DOCUMENT_H2},
+//                { NodeStoreKind.DOCUMENT_H2, NodeStoreKind.SEGMENT}
+        });
+    }
+
+    public CompositeNodeStoreQueryTestBase(NodeStoreKind root, NodeStoreKind mounts) {
+        this.nodeStoreRoot = root;
+        this.mounts = mounts;
+    }
+
+    @Before
+    public void initStore() throws Exception {
+
+        globalStore = register(nodeStoreRoot.create(null));
+        mountedStore = register(mounts.create("temp"));
+        deepMountedStore = register(mounts.create("deep"));
+        readOnlyStore = register(mounts.create("readOnly"));
+        emptyStore = register(mounts.create("empty")); // this NodeStore will always be empty
+
+        // create a property on the root node
+        NodeBuilder builder = globalStore.getRoot().builder();
+        builder.setProperty("prop", "val");
+        globalStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        assertTrue(globalStore.getRoot().hasProperty("prop"));
+
+        // create a different sub-tree on the root store
+        builder = globalStore.getRoot().builder();
+        NodeBuilder libsBuilder = builder.child("libs");
+        libsBuilder.child("first");
+        libsBuilder.child("second");
+
+        // create an empty /apps node with a property
+        builder.child("apps").setProperty("prop", "val");
+
+        globalStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        assertThat(globalStore.getRoot().getChildNodeCount(10), equalTo(2l));
+
+        // create a /tmp child on the mounted store and set a property
+        builder = mountedStore.getRoot().builder();
+        NodeBuilder tmpBuilder = builder.child("tmp");
+        tmpBuilder.setProperty("prop1", "val1");
+        tmpBuilder.child("child1").setProperty("prop1", "val1");
+        tmpBuilder.child("child2");
+
+        mountedStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        assertTrue(mountedStore.getRoot().hasChildNode("tmp"));
+        assertThat(mountedStore.getRoot().getChildNode("tmp").getChildNodeCount(10), equalTo(2l));
+
+        // populate /libs/mount/third in the deep mount, and include a property
+
+        builder = deepMountedStore.getRoot().builder();
+        builder.child("libs").child("mount").child("third").setProperty("mounted", "true");
+
+        deepMountedStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        assertTrue(deepMountedStore.getRoot().getChildNode("libs").getChildNode("mount").getChildNode("third").hasProperty("mounted"));
+
+        // populate /readonly with a single node
+        builder = readOnlyStore.getRoot().builder();
+        builder.child("readOnly");
+
+        readOnlyStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+
+        initMounts();
+    }
+
+    void initMounts() throws Exception {
+        mip = Mounts.newBuilder()
+                .readOnlyMount("temp", "/tmp")
+                .readOnlyMount("deep", "/libs/mount")
+                .readOnlyMount("empty", "/nowhere")
+                .readOnlyMount("readOnly", "/readOnly")
+                .build();
+
+        // don't use the builder since it would fail due to too many read-write stores
+        // but for the purposes of testing the general correctness it's fine
+        List<MountedNodeStore> nonDefaultStores = Lists.newArrayList();
+        nonDefaultStores.add(new MountedNodeStore(mip.getMountByName("temp"), mountedStore));
+        nonDefaultStores.add(new MountedNodeStore(mip.getMountByName("deep"), deepMountedStore));
+        nonDefaultStores.add(new MountedNodeStore(mip.getMountByName("empty"), emptyStore));
+        nonDefaultStores.add(new MountedNodeStore(mip.getMountByName("readOnly"), readOnlyStore));
+        store = new CompositeNodeStore(mip, globalStore, nonDefaultStores);
+
+        session = createRepository(store).login(null, null);
+        root = session.getLatestRoot();
+        qe = root.getQueryEngine();
+    }
+
+    protected ContentRepository createRepository(NodeStore store) {
+        return getOakRepo(store).createContentRepository();
+    }
+
+    Oak getOakRepo(NodeStore store) {
+        // register the same LuceneIndexProvider instance as query index provider and observer,
+        // so that the observer keeps the provider's index tracker up to date
+        LuceneIndexProvider provider = new LuceneIndexProvider();
+        return new Oak(store).with(new InitialContent())
+                .with(new OpenSecurityProvider())
+                .with((QueryIndexProvider) provider)
+                .with((Observer) provider)
+                .with(new PropertyIndexEditorProvider().with(mip))
+                .with(new PropertyIndexProvider().with(mip))
+                .with(new NodeTypeIndexProvider().with(mip))
+                .with(new ReferenceEditorProvider().with(mip))
+                .with(new ReferenceIndexProvider().with(mip))
+                .with(new LuceneIndexEditorProvider());
+    }
+
+    protected List<String> executeQuery(String query, String language) {
+        boolean pathsOnly = false;
+        if (language.equals("xpath")) {
+            pathsOnly = true;
+        }
+        return executeQuery(query, language, pathsOnly);
+    }
+
+    protected List<String> executeQuery(String query, String language, boolean pathsOnly) {
+        return executeQuery(query, language, pathsOnly, false);
+    }
+
+    protected List<String> executeQuery(String query, String language, boolean pathsOnly, boolean skipSort) {
+        long time = System.currentTimeMillis();
+        List<String> lines = new ArrayList<String>();
+        try {
+            Result result = executeQuery(query, language, NO_BINDINGS);
+            for (ResultRow row : result.getRows()) {
+                String r = readRow(row, pathsOnly);
+                if (query.startsWith("explain ")) {
+                    r = formatPlan(r);
+                }
+                lines.add(r);
+            }
+            if (!query.contains("order by") && !skipSort) {
+                Collections.sort(lines);
+            }
+        } catch (ParseException e) {
+            lines.add(e.toString());
+        } catch (IllegalArgumentException e) {
+            lines.add(e.toString());
+        }
+        time = System.currentTimeMillis() - time;
+        if (time > 5 * 60 * 1000 && !isDebugModeEnabled()) {
+            // more than 5 minutes
+            fail("Query took too long: " + query + " took " + time + " ms");
+        }
+        return lines;
+    }
+
+    /**
+     * Check whether the test is running in debug mode.
+     *
+     * @return true if debug mode is (most likely) enabled
+     */
+    protected static boolean isDebugModeEnabled() {
+        return java.lang.management.ManagementFactory.getRuntimeMXBean()
+                .getInputArguments().toString().indexOf("-agentlib:jdwp") > 0;
+    }
+
+    static String formatPlan(String plan) {
+        plan = plan.replaceAll(" where ", "\n where ");
+        plan = plan.replaceAll(" inner join ", "\n inner join ");
+        plan = plan.replaceAll(" on ", "\n on ");
+        plan = plan.replaceAll(" and ", "\n and ");
+        return plan;
+    }
+
+    protected static String readRow(ResultRow row, boolean pathOnly) {
+        if (pathOnly) {
+            return row.getValue(QueryConstants.JCR_PATH).getValue(Type.STRING);
+        }
+        StringBuilder buff = new StringBuilder();
+        PropertyValue[] values = row.getValues();
+        for (int i = 0; i < values.length; i++) {
+            if (i > 0) {
+                buff.append(", ");
+            }
+            PropertyValue v = values[i];
+            if (v == null) {
+                buff.append("null");
+            } else if (v.isArray()) {
+                buff.append('[');
+                for (int j = 0; j < v.count(); j++) {
+                    if (j > 0) {
+                        buff.append(", ");
+                    }
+                    buff.append(v.getValue(Type.STRING, j));
+                }
+                buff.append(']');
+            } else {
+                buff.append(v.getValue(Type.STRING));
+            }
+        }
+        return buff.toString();
+    }
+
+    protected Result executeQuery(String statement, String language,
+            Map<String, PropertyValue> sv) throws ParseException {
+        return qe.executeQuery(statement, language, sv, NO_MAPPINGS);
+    }
+
+    @After
+    public void closeRepositories() throws Exception {
+        for ( NodeStoreRegistration reg : registrations ) {
+            reg.close();
+        }
+    }
+
+    static enum NodeStoreKind {
+        MEMORY {
+            @Override
+            public NodeStoreRegistration create(String name) {
+                return new NodeStoreRegistration() {
+
+                    private MemoryNodeStore instance;
+
+                    @Override
+                    public NodeStore get() {
+
+                        if (instance != null) {
+                            throw new IllegalStateException("instance already created");
+                        }
+
+                        instance = new MemoryNodeStore();
+
+                        return instance;
+                    }
+
+                    @Override
+                    public void close() throws Exception {
+                        // does nothing
+                    }
+                };
+            }
+
+            public boolean supportsBlobCreation() {
+                return false;
+            }
+        }, SEGMENT {
+            @Override
+            public NodeStoreRegistration create(final String name) {
+                return new NodeStoreRegistration() {
+
+                    private SegmentNodeStore instance;
+                    private FileStore store;
+                    private File storePath;
+                    private String blobStorePath;
+
+                    @Override
+                    public NodeStore get() throws Exception {
+
+                        if (instance != null) {
+                            throw new IllegalStateException("instance already created");
+                        }
+
+                        // TODO - don't use Unix directory separators
+                        String directoryName = name != null ? "segment-" + name : "segment";
+                        storePath = new File("target/classes/" + directoryName);
+
+                        String blobStoreDirectoryName = name != null ? "blob-" + name : "blob";
"blob-" + name : "blob"; + blobStorePath = "target/classes/" + blobStoreDirectoryName; + + BlobStore blobStore = new FileBlobStore(blobStorePath); + + store = FileStoreBuilder.fileStoreBuilder(storePath).withBlobStore(blobStore).build(); + instance = SegmentNodeStoreBuilders.builder(store).build(); + + return instance; + } + + @Override + public void close() throws Exception { + store.close(); + + FileUtils.deleteQuietly(storePath); + FileUtils.deleteQuietly(new File(blobStorePath)); + } + }; + } + }, DOCUMENT_H2 { + + // TODO - copied from DocumentRdbFixture + + private DataSource ds; + + @Override + public NodeStoreRegistration create(final String name) { + + return new NodeStoreRegistration() { + + private DocumentNodeStore instance; + + @Override + public NodeStore get() throws Exception { + RDBOptions options = new RDBOptions().dropTablesOnClose(true); + String jdbcUrl = "jdbc:h2:file:./target/classes/document"; + if ( name != null ) { + jdbcUrl += "-" + name; + } + ds = RDBDataSourceFactory.forJdbcUrl(jdbcUrl, "sa", ""); + + instance = new DocumentMK.Builder() + .setRDBConnection(ds, options).build(); + instance.setMaxBackOffMillis(0); + + return instance; + + } + + @Override + public void close() throws Exception { + instance.dispose(); + if ( ds instanceof Closeable ) { + ((Closeable) ds).close(); + } + } + + }; + + } + }; + + public abstract NodeStoreRegistration create(@Nullable String name); + + public boolean supportsBlobCreation() { + return true; + } + } + + private interface NodeStoreRegistration { + NodeStore get() throws Exception; + + void close() throws Exception; + } + + private NodeStore register(NodeStoreRegistration reg) throws Exception { + registrations.add(reg); + + return reg.get(); + } + +}