From 71463feda7455ce67f5ba81c029a6f8b7a5a24f1 Mon Sep 17 00:00:00 2001 From: Philipp Marx Date: Mon, 3 Sep 2012 11:57:18 +0200 Subject: [PATCH] MongoDB-based MicroKernel fro Oak --- oak-mongomk-api/pom.xml | 42 + .../apache/jackrabbit/mongomk/api/BlobStore.java | 73 + .../apache/jackrabbit/mongomk/api/NodeStore.java | 118 ++ .../mongomk/api/command/AbstractCommand.java | 42 + .../jackrabbit/mongomk/api/command/Command.java | 92 ++ .../mongomk/api/command/CommandExecutor.java | 45 + .../jackrabbit/mongomk/api/model/Commit.java | 84 ++ .../jackrabbit/mongomk/api/model/Instruction.java | 143 ++ .../mongomk/api/model/InstructionVisitor.java | 74 + .../apache/jackrabbit/mongomk/api/model/Node.java | 100 ++ oak-mongomk-impl/pom.xml | 73 + .../jackrabbit/mongomk/impl/MongoMicroKernel.java | 213 +++ .../apache/jackrabbit/mongomk/impl/NodeFilter.java | 88 ++ .../mongomk/impl/builder/CommitBuilder.java | 114 ++ .../mongomk/impl/builder/NodeBuilder.java | 125 ++ .../mongomk/impl/command/CommandExecutorImpl.java | 55 + .../mongomk/impl/json/DefaultJsopHandler.java | 92 ++ .../jackrabbit/mongomk/impl/json/JsonUtil.java | 141 ++ .../jackrabbit/mongomk/impl/json/JsopParser.java | 202 +++ .../mongomk/impl/model/AddNodeInstructionImpl.java | 61 + .../impl/model/AddPropertyInstructionImpl.java | 78 ++ .../jackrabbit/mongomk/impl/model/CommitImpl.java | 105 ++ .../impl/model/CopyNodeInstructionImpl.java | 64 + .../impl/model/MoveNodeInstructionImpl.java | 52 + .../jackrabbit/mongomk/impl/model/NodeImpl.java | 324 +++++ .../impl/model/RemoveNodeInstructionImpl.java | 61 + .../impl/model/SetPropertyInstructionImpl.java | 78 ++ .../mongomk/builder/CommitBuilderImplTest.java | 129 ++ .../mongomk/builder/NodeBuilderTest.java | 61 + .../jackrabbit/mongomk/impl/InstructionAssert.java | 74 + .../apache/jackrabbit/mongomk/impl/NodeAssert.java | 117 ++ .../mongomk/impl/json/JsopParserTest.java | 543 ++++++++ oak-mongomk-perf/pom.xml | 98 ++ .../jackrabbit/mongomk/perf/BlobStoreFS.java | 55 + 
.../org/apache/jackrabbit/mongomk/perf/Config.java | 56 + .../jackrabbit/mongomk/perf/MicroKernelPerf.java | 113 ++ .../mongomk/perf/MicroKernelPerfClient.java | 220 +++ .../mongomk/perf/MicroKernelPerfMaster.java | 257 ++++ .../mongomk/perf/PrepareEnvironment.java | 50 + .../mongomk/perf/RandomJsopGenerator.java | 190 +++ .../write/MultipleMksWriteNodesTest.java | 126 ++ .../performance/write/MultipleNodesTestBase.java | 59 + .../mongomk/performance/write/TestUtil.java | 71 + .../mongomk/performance/write/WriteNodesTest.java | 85 ++ oak-mongomk-perf/src/main/resources/config.cfg | 21 + oak-mongomk-perf/src/main/resources/log4j.cfg | 22 + oak-mongomk-test/pom.xml | 69 + .../mongomk/test/it/MongoDataStoreIT.java | 28 + .../mongomk/test/it/MongoEverythingIT.java | 28 + .../mongomk/test/it/MongoMicroKernelFixture.java | 85 ++ .../mongomk/test/it/MongoMicroKernelIT.java | 28 + ...rg.apache.jackrabbit.mk.test.MicroKernelFixture | 16 + oak-mongomk-test/src/test/resources/config.cfg | 18 + oak-mongomk/pom.xml | 90 ++ .../apache/jackrabbit/mongomk/BlobStoreMongo.java | 56 + .../apache/jackrabbit/mongomk/MongoConnection.java | 107 ++ .../apache/jackrabbit/mongomk/NodeStoreMongo.java | 206 +++ .../mongomk/command/CommitCommandMongo.java | 289 ++++ .../command/ConflictingCommitException.java | 38 + .../mongomk/command/GetBlobLengthCommandMongo.java | 45 + .../command/GetHeadRevisionCommandMongo.java | 56 + .../mongomk/command/GetNodesCommandMongo.java | 241 ++++ .../mongomk/command/NodeExistsCommandMongo.java | 92 ++ .../mongomk/command/ReadBlobCommandMongo.java | 75 + .../mongomk/command/WriteBlobCommandMongo.java | 68 + .../model/CommitCommandInstructionVisitor.java | 261 ++++ .../jackrabbit/mongomk/model/CommitMongo.java | 155 +++ .../apache/jackrabbit/mongomk/model/HeadMongo.java | 60 + .../apache/jackrabbit/mongomk/model/NodeMongo.java | 270 ++++ .../jackrabbit/mongomk/query/AbstractQuery.java | 52 + .../jackrabbit/mongomk/query/FetchHeadQuery.java | 48 + 
.../mongomk/query/FetchHeadRevisionQuery.java | 51 + .../mongomk/query/FetchNodeByPathQuery.java | 103 ++ .../query/FetchNodesByPathAndDepthQuery.java | 120 ++ .../mongomk/query/FetchNodesForRevisionQuery.java | 99 ++ .../mongomk/query/FetchValidCommitsQuery.java | 140 ++ .../mongomk/query/FetchValidRevisionsQuery.java | 62 + .../jackrabbit/mongomk/query/QueryUtils.java | 90 ++ .../mongomk/query/ReadAndIncHeadRevisionQuery.java | 57 + .../mongomk/query/SaveAndSetHeadRevisionQuery.java | 67 + .../jackrabbit/mongomk/query/SaveCommitQuery.java | 58 + .../jackrabbit/mongomk/query/SaveNodesQuery.java | 63 + .../apache/jackrabbit/mongomk/util/MongoUtil.java | 109 ++ .../src/test/java/com/mongodb/DBCollection.java | 1460 ++++++++++++++++++++ .../apache/jackrabbit/mongomk/BaseMongoTest.java | 78 ++ .../org/apache/jackrabbit/mongomk/MongoAssert.java | 132 ++ .../mongomk/command/CommitCommandMongoTest.java | 377 +++++ .../command/ConcurrentCommitCommandMongoTest.java | 136 ++ .../command/GetBlobLengthCommandMongoTest.java | 73 + .../command/GetHeadRevisionCommandMongoTest.java | 43 + .../mongomk/command/GetNodesCommandMongoTest.java | 300 ++++ .../command/NodeExistsCommandMongoTest.java | 231 +++ .../mongomk/command/ReadBlobCommandMongoTest.java | 86 ++ .../mongomk/command/WriteBlobCommandMongoTest.java | 65 + .../query/FetchNodesByPathAndDepthQueryTest.java | 218 +++ .../query/FetchNodesForRevisionQueryTest.java | 188 +++ .../mongomk/query/FetchValidCommitsQueryTest.java | 125 ++ .../mongomk/scenario/SimpleNodeScenario.java | 147 ++ oak-mongomk/src/test/resources/config.cfg | 28 + pom.xml | 10 + 100 files changed, 12283 insertions(+), 0 deletions(-) create mode 100644 oak-mongomk-api/pom.xml create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/BlobStore.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/NodeStore.java create mode 100644 
oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/command/AbstractCommand.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/command/Command.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/command/CommandExecutor.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Commit.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Instruction.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/InstructionVisitor.java create mode 100644 oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Node.java create mode 100644 oak-mongomk-impl/pom.xml create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/MongoMicroKernel.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/NodeFilter.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/builder/CommitBuilder.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/builder/NodeBuilder.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommandExecutorImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/DefaultJsopHandler.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsonUtil.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsopParser.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/AddNodeInstructionImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/AddPropertyInstructionImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/CommitImpl.java create 
mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/CopyNodeInstructionImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/MoveNodeInstructionImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/NodeImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/RemoveNodeInstructionImpl.java create mode 100644 oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/SetPropertyInstructionImpl.java create mode 100644 oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/builder/CommitBuilderImplTest.java create mode 100644 oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/builder/NodeBuilderTest.java create mode 100644 oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/InstructionAssert.java create mode 100644 oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/NodeAssert.java create mode 100644 oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/json/JsopParserTest.java create mode 100644 oak-mongomk-perf/pom.xml create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/BlobStoreFS.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/Config.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerf.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerfClient.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerfMaster.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/PrepareEnvironment.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/RandomJsopGenerator.java create mode 100644 
oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/MultipleMksWriteNodesTest.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/MultipleNodesTestBase.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/TestUtil.java create mode 100644 oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/WriteNodesTest.java create mode 100644 oak-mongomk-perf/src/main/resources/config.cfg create mode 100644 oak-mongomk-perf/src/main/resources/log4j.cfg create mode 100644 oak-mongomk-test/pom.xml create mode 100644 oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoDataStoreIT.java create mode 100644 oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoEverythingIT.java create mode 100644 oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java create mode 100644 oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelIT.java create mode 100644 oak-mongomk-test/src/test/resources/META-INF/services/org.apache.jackrabbit.mk.test.MicroKernelFixture create mode 100644 oak-mongomk-test/src/test/resources/config.cfg create mode 100644 oak-mongomk/pom.xml create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/BlobStoreMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoConnection.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/NodeStoreMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/ConflictingCommitException.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongo.java create mode 100644 
oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetNodesCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/ReadBlobCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitCommandInstructionVisitor.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/HeadMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/NodeMongo.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/AbstractQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchHeadQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchHeadRevisionQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodeByPathQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchValidCommitsQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchValidRevisionsQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/QueryUtils.java create mode 100644 
oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/ReadAndIncHeadRevisionQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/SaveAndSetHeadRevisionQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/SaveCommitQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/SaveNodesQuery.java create mode 100644 oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/util/MongoUtil.java create mode 100644 oak-mongomk/src/test/java/com/mongodb/DBCollection.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/BaseMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/MongoAssert.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ConcurrentCommitCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetNodesCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ReadBlobCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongoTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQueryTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQueryTest.java create mode 100644 
oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchValidCommitsQueryTest.java create mode 100644 oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/scenario/SimpleNodeScenario.java create mode 100644 oak-mongomk/src/test/resources/config.cfg diff --git a/oak-mongomk-api/pom.xml b/oak-mongomk-api/pom.xml new file mode 100644 index 0000000..1dab930 --- /dev/null +++ b/oak-mongomk-api/pom.xml @@ -0,0 +1,42 @@ + + + + + + 4.0.0 + + + org.apache.jackrabbit + oak-parent + 0.5-SNAPSHOT + + + oak-mongomk-api + + + + org.apache.jackrabbit + oak-mk + ${project.version} + + + + diff --git a/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/BlobStore.java b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/BlobStore.java new file mode 100644 index 0000000..e3dcec4 --- /dev/null +++ b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/BlobStore.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.api; + +import java.io.InputStream; + +import org.apache.jackrabbit.mk.api.MicroKernel; + +/** + * The BlobStore interface deals with all blob related operations of the {@link MicroKernel}. + * + *

+ * Since binary storage and node storage most likely use different backend technologies two separate interfaces for + * these operations are provided. + *

+ * + *

+ * This interface is not only a partly {@code MicroKernel} but also provides a different layer of abstraction by + * converting the {@link String} parameters into higher level objects to ease the development for implementors of the + * {@code MicroKernel}. + *

+ * + * @see NodeStore + * + * @author NodeStore interface deals with all node related operations of the {@link MicroKernel}. + * + *

+ * Since binary storage and node storage most likely use different backend technologies two separate interfaces for + * these operations are provided. + *

+ * + *

+ * This interface is not only a partly {@code MicroKernel} but also provides a different layer of abstraction by + * converting the {@link String} parameters into higher level objects to ease the development for implementors of the + * {@code MicroKernel}. + *

+ * + * @see BlobStore + * + * @author
The result type of the {@code Command}. + * + * @author + * It adds some functionality for retries and other non business logic related actions (i.e. logging, performance + * tracking, etc). + *

+ * + * @see
Command Pattern + * @see CommandExecutor + * + * @param + * The result type of the {@code Command}. + * + * @author Command Pattern. + * + *

+ * The implementation of this class contains the business logic to execute a command. + *

+ * + * @see Command Pattern + * @see Command + * + * @author JSOP diff of this commit. + * + * @return The {@link String} representing the diff. + */ + String getDiff(); + + /** + * Returns the {@link List} of {@link Instruction}s which were created from the diff. + * + * @see #getDiff() + * + * @return The {@link List} of {@link Instruction}s. + */ + List getInstructions(); + + /** + * Returns the message of the commit. + * + * @return The message. + */ + String getMessage(); + + /** + * Returns the path of the root node of this commit. + * + * @return The path of the root node. + */ + String getPath(); + + /** + * Returns the revision id of this commit if known already, else this will return {@code null}. The revision + * id will be determined only after the commit has been successfully performed. + * + * @see #setRevisionId(String) + * + * @return The revision id of this commit or {@code null}. + */ + String getRevisionId(); + + /** + * Sets the revision id of this commit. + * + * @see #getRevisionId() + * + * @param revisionId The revision id to set. + */ + void setRevisionId(String revisionId); + + + /** + * Returns the timestamp of this commit. + * + * @return The timestamp of this commit. + */ + long getTimestamp(); +} diff --git a/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Instruction.java b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Instruction.java new file mode 100644 index 0000000..9212fb0 --- /dev/null +++ b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Instruction.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.api.model; + +/** + * An {@code Instruction} is an abstraction of a single JSOP + * operation. + * + *

+ * Each operation is a concrete subinterface of {@code Instruction} and extending it by the specific properties of the + * operation. There is no exact 1 : 1 mapping between a {@code JSOP} operation and a subinterface, i.e. in {@code JSOP} + * there is one add operation for adding nodes and properties whereas there are two specific subinterfaces; one for + * adding a node and one for adding a property. + *

+ * + * @author "+" STRING ":" (ATOM | ARRAY) + */ + public interface AddPropertyInstruction extends Instruction { + + /** + * Returns the key of the property to add. + * + * @return The key. + */ + String getKey(); + + /** + * Returns the value of the property to add. + * + * @return The value. + */ + Object getValue(); + } + + /** + * The copy node operation => "*" STRING ":" STRING + */ + public interface CopyNodeInstruction extends Instruction { + + /** + * Returns the destination path. + * + * @return the destination path. + */ + String getDestPath(); + + /** + * Returns the source path. + * + * @return the source path. + */ + String getSourcePath(); + } + + /** + * The move node operation => ">" STRING ":" STRING + */ + public interface MoveNodeInstruction extends Instruction { + + /** + * Returns the destination path. + * + * @return the destination path. + */ + String getDestPath(); + + /** + * Returns the source path. + * + * @return the source path. + */ + String getSourcePath(); + } + + /** + * The remove node operation => "-" STRING + */ + public interface RemoveNodeInstruction extends Instruction { + } + + /** + * The set property operation => "^" STRING ":" ATOM | ARRAY + */ + public interface SetPropertyInstruction extends Instruction { + + /** + * Returns the key of the property to set. + * + * @return The key. + */ + String getKey(); + + /** + * Returns the value of the property to set. + * + * @return The value. 
+ */ + Object getValue(); + } +} \ No newline at end of file diff --git a/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/InstructionVisitor.java b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/InstructionVisitor.java new file mode 100644 index 0000000..c1b965c --- /dev/null +++ b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/InstructionVisitor.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.api.model; + +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddPropertyInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.CopyNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.MoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.RemoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.SetPropertyInstruction; + +/** + * A Visitor to iterate through a list of + * {@code Instruction}s without the need to use {@code instanceof} on each item. 
+ */ +public interface InstructionVisitor { + + /** + * Visits a {@code AddNodeInstruction}. + * + * @param instruction + * The instruction. + */ + void visit(AddNodeInstruction instruction); + + /** + * Visits a {@code AddPropertyInstruction}. + * + * @param instruction The instruction. + */ + void visit(AddPropertyInstruction instruction); + + /** + * Visits a {@code CopyNodeInstruction}. + * + * @param instruction The instruction. + */ + void visit(CopyNodeInstruction instruction); + + /** + * Visits a {@code MoveNodeInstruction}. + * + * @param instruction The instruction. + */ + void visit(MoveNodeInstruction instruction); + + /** + * Visits a {@code RemoveNodeInstruction}. + * + * @param instruction The instruction. + */ + void visit(RemoveNodeInstruction instruction); + + /** + * Visits a {@code SetPropertyInstruction}. + * + * @param instruction The instruction. + */ + void visit(SetPropertyInstruction instruction); +} \ No newline at end of file diff --git a/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Node.java b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Node.java new file mode 100644 index 0000000..3265fa7 --- /dev/null +++ b/oak-mongomk-api/src/main/java/org/apache/jackrabbit/mongomk/api/model/Node.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.api.model; + +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +/** + * A higher level object representing a node. + * + * @author + + + + + 4.0.0 + + + org.apache.jackrabbit + oak-parent + 0.5-SNAPSHOT + + + oak-mongomk-impl + + + + junit + junit + test + + + org.apache.jackrabbit + oak-mk + ${project.version} + + + org.json + json + 20090211 + + + org.apache.jackrabbit + oak-mongomk-api + ${project.version} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/MongoMicroKernel.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/MongoMicroKernel.java new file mode 100644 index 0000000..ea513cd --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/MongoMicroKernel.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl; + +import java.io.InputStream; + +import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mk.api.MicroKernelException; +import org.apache.jackrabbit.mongomk.api.BlobStore; +import org.apache.jackrabbit.mongomk.api.NodeStore; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.builder.CommitBuilder; +import org.apache.jackrabbit.mongomk.impl.json.JsonUtil; + +/** + * The {@code MongoDB} implementation of the {@link MicroKernel}. + * + *

+ * This class will transform and delegate to instances of {@link NodeStore} and {@link BlobStore}. + *

+ * + * @author
0 && nodeFilter != null && nodeFilter.getChildNodeFilter() != null) { + // Both an offset > 0 and a filter on node names have been specified... + throw new IllegalArgumentException("offset > 0 with child node filter"); + } + + try { + // FIXME [Mete] Should filter, offset, and maxChildNodes be handled in Mongo instead? + Node rootNode = nodeStore.getNodes(path, revisionId, depth, offset, maxChildNodes, filter); + if (rootNode == null) { + return null; + } + return JsonUtil.convertToJson(rootNode, depth, (int)offset, maxChildNodes, true, nodeFilter); + } catch (Exception e) { + throw new MicroKernelException(e); + } + } + + @Override + public String getRevisionHistory(long since, int maxEntries, String path) throws MicroKernelException { + return nodeStore.getRevisionHistory(since, maxEntries, path); + } + + @Override + public String merge(String branchRevisionId, String message) throws MicroKernelException { + throw new UnsupportedOperationException("Merge is currently not supported."); + } + + @Override + public boolean nodeExists(String path, String revisionId) throws MicroKernelException { + boolean exists = false; + + try { + String revId = null; + if (revisionId != null) { + revId = new String(revisionId); + } + + exists = nodeStore.nodeExists(path, revId); + } catch (Exception e) { + throw new MicroKernelException(e); + } + + return exists; + } + + @Override + public int read(String blobId, long pos, byte[] buff, int off, int length) throws MicroKernelException { + int totalBytes = -1; + + try { + totalBytes = blobStore.readBlob(blobId, pos, buff, off, length); + } catch (Exception e) { + throw new MicroKernelException(e); + } + + return totalBytes; + } + + @Override + public String waitForCommit(String oldHeadRevisionId, long timeout) throws MicroKernelException, + InterruptedException { + return nodeStore.waitForCommit(oldHeadRevisionId, timeout); + } + + @Override + public String write(InputStream in) throws MicroKernelException { + String blobId = null; + 
+ try { + blobId = blobStore.writeBlob(in); + } catch (Exception e) { + throw new MicroKernelException(e); + } + + return blobId; + } +} \ No newline at end of file diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/NodeFilter.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/NodeFilter.java new file mode 100644 index 0000000..332204e --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/NodeFilter.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.jackrabbit.mk.json.JsopTokenizer; +import org.apache.jackrabbit.mk.util.NameFilter; + +/** + * FIXME [Mete] Stolen from OAK. Should go away at some point when MongoMK becomes + * part of OAK. 
+ */ +public class NodeFilter { + + NameFilter nodeFilter; + NameFilter propFilter; + + private NodeFilter(NameFilter nodeFilter, NameFilter propFilter) { + this.nodeFilter = nodeFilter; + this.propFilter = propFilter; + } + + static NodeFilter parse(String json) { + // parse json format filter + JsopTokenizer t = new JsopTokenizer(json); + t.read('{'); + + NameFilter nodeFilter = null, propFilter = null; + + do { + String type = t.readString(); + t.read(':'); + String[] globs = parseArray(t); + if (type.equals("nodes")) { + nodeFilter = new NameFilter(globs); + } else if (type.equals("properties")) { + propFilter = new NameFilter(globs); + } else { + throw new IllegalArgumentException("illegal filter format"); + } + } while (t.matches(',')); + t.read('}'); + + return new NodeFilter(nodeFilter, propFilter); + } + + private static String[] parseArray(JsopTokenizer t) { + List l = new ArrayList(); + t.read('['); + do { + l.add(t.readString()); + } while (t.matches(',')); + t.read(']'); + return l.toArray(new String[l.size()]); + } + + NameFilter getChildNodeFilter() { + return nodeFilter; + } + + NameFilter getPropertyFilter() { + return propFilter; + } + + boolean includeNode(String name) { + return nodeFilter == null || nodeFilter.matches(name); + } + + public boolean includeProperty(String name) { + return propFilter == null || propFilter.matches(name); + } +} \ No newline at end of file diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/builder/CommitBuilder.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/builder/CommitBuilder.java new file mode 100644 index 0000000..7b63eee --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/builder/CommitBuilder.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl.builder; + +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddPropertyInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.CopyNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.MoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.RemoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.SetPropertyInstruction; +import org.apache.jackrabbit.mongomk.impl.json.DefaultJsopHandler; +import org.apache.jackrabbit.mongomk.impl.json.JsopParser; +import org.apache.jackrabbit.mongomk.impl.model.AddNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.AddPropertyInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.CommitImpl; +import org.apache.jackrabbit.mongomk.impl.model.CopyNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.MoveNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.RemoveNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.SetPropertyInstructionImpl; + +/** + * A builder to convert a JSOP diff into a 
{@link Commit}. + * + * @author JSON + * strings. + * + * @author properties = null; + for (@SuppressWarnings("rawtypes") + Iterator iterator = jsonObject.keys(); iterator.hasNext();) { + String key = (String) iterator.next(); + Object value = jsonObject.get(key); + + if (value instanceof JSONObject) { + String childPath = PathUtils.concat(realPath, key); + + Node childNode = parseNode(childPath, (JSONObject) value); + node.addChild(childNode); + } else { + if (properties == null) { + properties = new HashMap(); + } + + Object converted = JsonUtil.convertJsonValue(value.toString()); + properties.put(key, converted); + } + } + + node.setProperties(properties); + + return node; + } +} diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommandExecutorImpl.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommandExecutorImpl.java new file mode 100644 index 0000000..03d9ba4 --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/command/CommandExecutorImpl.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.impl.command; + +import org.apache.jackrabbit.mongomk.api.command.Command; +import org.apache.jackrabbit.mongomk.api.command.CommandExecutor; + +/** + * Implementation of the {@link CommandExecutor} interface. + * + * @author + * Each event callback has an empty default implementation. An implementor may choose the appropriate methods to + * overwrite. + *

+ */ +public class DefaultJsopHandler { + + /** + * Event: A node has been added. + * + * @param parentPath The path where the node was added to. + * @param name The name of the added node. + */ + public void nodeAdded(String parentPath, String name) { + // No-op + } + + /** + * Event: A node was copied. + * + * @param rootPath The root path where the copy took place. + * @param oldPath The old path of the node (relative to the root path). + * @param newPath The new path of the node (relative to the root path). + */ + public void nodeCopied(String rootPath, String oldPath, String newPath) { + // No-op + } + + /** + * Event: A node was moved. + * + * @param rootPath The root path where the copy took place. + * @param oldPath The old path of the node (relative to the root path). + * @param newPath The new path of the node (relative to the root path). + */ + public void nodeMoved(String rootPath, String oldPath, String newPath) { + // No-op + } + + /** + * Event: A node was removed. + * + * @param parentPath The path where the node was removed from. + * @param name The name of the node. + */ + public void nodeRemoved(String parentPath, String name) { + // No-op + } + + /** + * Event: A property was added. + * + * @param path The path of the node where the property was added. + * @param key The key of the property. + * @param value The value of the property. + */ + public void propertyAdded(String path, String key, Object value) { + // No-op + } + + /** + * Event: A property was set. + * + * @param path The path of the node where the property was set. + * @param key The key of the property. + * @param value The value of the property. 
+ */ + public void propertySet(String path, String key, Object value) { + // No-op + } +} diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsonUtil.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsonUtil.java new file mode 100644 index 0000000..529c1bb --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsonUtil.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl.json; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.jackrabbit.mk.json.JsopBuilder; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.NodeFilter; +import org.json.JSONArray; +import org.json.JSONObject; + + +/** + * FIXME - [Mete] This should really merge with MicroKernelImpl#toJson. + * + *
JSON related utility classes. + * + * @author properties = node.getProperties(); + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + String key = entry.getKey(); + if (filter == null || filter.includeProperty(key)) { + Object value = entry.getValue(); + builder.key(key); + if (value instanceof String) { + builder.value(value.toString()); + } else { + builder.encodedValue(value.toString()); + } + } + } + } + + long childCount = node.getChildCount(); + if (inclVirtualProps) { + if (filter == null || filter.includeProperty(":childNodeCount")) { + // :childNodeCount is by default always included + // unless it is explicitly excluded in the filter + builder.key(":childNodeCount").value(childCount); + } + // FIXME [Mete] See if :hash is still being used. + /*check whether :hash has been explicitly included + if (filter != null) { + NameFilter nf = filter.getPropertyFilter(); + if (nf != null + && nf.getInclusionPatterns().contains(":hash") + && !nf.getExclusionPatterns().contains(":hash")) { + builder.key(":hash").value(rep.getRevisionStore().getId(node).toString()); + } + } + */ + } + + // FIXME [Mete] There's still some more work here. 
+ Iterator entries = node.getChildEntries(offset, maxChildNodes); + while (entries.hasNext()) { + Node child = entries.next(); + int numSiblings = 0; + if (maxChildNodes != -1 && ++numSiblings > maxChildNodes) { + break; + } + builder.key(child.getName()); + if ((depth == -1) || (currentDepth < depth)) { + convertToJson(builder, child, depth, currentDepth + 1, offset, + maxChildNodes, inclVirtualProps, filter); + } else { + builder.object(); + builder.endObject(); + } + } + + builder.endObject(); + } + + private static Object convertJsonValue(Object jsonObject) throws Exception { + if (jsonObject == JSONObject.NULL) { + return null; + } + + if (jsonObject instanceof JSONArray) { + List elements = new LinkedList(); + JSONArray dummyArray = (JSONArray) jsonObject; + for (int i = 0; i < dummyArray.length(); ++i) { + Object raw = dummyArray.get(i); + Object parsed = convertJsonValue(raw); + elements.add(parsed); + } + return elements; + } + + return jsonObject; + } + + private JsonUtil() { + // no instantiation + } +} diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsopParser.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsopParser.java new file mode 100644 index 0000000..c2ea103 --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/json/JsopParser.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl.json; + +import javax.xml.parsers.SAXParser; + +import org.apache.jackrabbit.mk.json.JsopReader; +import org.apache.jackrabbit.mk.json.JsopTokenizer; +import org.apache.jackrabbit.oak.commons.PathUtils; + +/** + * An event based parser for JSOP. + * + *

+ * This parser is similar to a {@link SAXParser} using a callback ({@code DefaultHandler}) to inform about certain + * events during parsing, i.e. node was added, node was removed, etc. This relieves the implementor from the burden of + * performing a semantic analysis of tokens which are being parsed. + *

+ * + *

+ * The underlying token parser is the {@link JsopTokenizer}. + *

+ * + * @author ': { + parseOpMoved(); + break; + } + case '^': { + parseOpSet(); + break; + } + case '-': { + parseOpRemoved(); + break; + } + default: + throw new IllegalStateException("Unknown operation: " + (char) token); + } + } + } + + private void parseOpAdded(String currentPath) throws Exception { + String subPath = tokenizer.readString(); + String path = PathUtils.concat(currentPath, subPath); + + tokenizer.read(':'); + + if (tokenizer.matches('{')) { + String parentPath = PathUtils.denotesRoot(path) ? "" : PathUtils.getParentPath(path); + String nodeName = PathUtils.denotesRoot(path) ? "/" : PathUtils.getName(path); + defaultHandler.nodeAdded(parentPath, nodeName); + + if (!tokenizer.matches('}')) { + do { + int pos = tokenizer.getLastPos(); + String propName = tokenizer.readString(); + tokenizer.read(':'); + + if (tokenizer.matches('{')) { // parse a nested node + tokenizer.setPos(pos); // resetting to last post b/c parseOpAdded expects the whole json + tokenizer.read(); + parseOpAdded(path); + } else { // parse property + String valueAsString = tokenizer.readRawValue().trim(); + Object value = JsonUtil.convertJsonValue(valueAsString); + + defaultHandler.propertyAdded(path, propName, value); + } + } while (tokenizer.matches(',')); + + tokenizer.read('}'); // explicitly close the bracket + } + } + } + + private void parseOpCopied() throws Exception { + int pos = tokenizer.getLastPos(); + String subPath = tokenizer.readString(); + String srcPath = PathUtils.concat(path, subPath); + if (!PathUtils.isAbsolute(srcPath)) { + throw new Exception("Absolute path expected: " + srcPath + ", pos: " + pos); + } + tokenizer.read(':'); + String targetPath = tokenizer.readString(); + if (!PathUtils.isAbsolute(targetPath)) { + targetPath = PathUtils.concat(path, targetPath); + if (!PathUtils.isAbsolute(targetPath)) { + throw new Exception("Absolute path expected: " + targetPath + ", pos: " + pos); + } + } + defaultHandler.nodeCopied(path, srcPath, targetPath); + } + + 
private void parseOpMoved() throws Exception { + int pos = tokenizer.getLastPos(); + String subPath = tokenizer.readString(); + String srcPath = PathUtils.concat(path, subPath); + if (!PathUtils.isAbsolute(srcPath)) { + throw new Exception("Absolute path expected: " + srcPath + ", pos: " + pos); + } + tokenizer.read(':'); + pos = tokenizer.getLastPos(); + String targetPath = tokenizer.readString(); + if (!PathUtils.isAbsolute(targetPath)) { + targetPath = PathUtils.concat(path, targetPath); + if (!PathUtils.isAbsolute(targetPath)) { + throw new Exception("absolute path expected: " + targetPath + ", pos: " + pos); + } + } + defaultHandler.nodeMoved(path, srcPath, targetPath); + } + + private void parseOpSet() throws Exception { + int pos = tokenizer.getLastPos(); + String subPath = tokenizer.readString(); + tokenizer.read(':'); + String value; + if (tokenizer.matches(JsopReader.NULL)) { + value = null; + } else { + value = tokenizer.readRawValue().trim(); + } + String targetPath = PathUtils.concat(path, subPath); + if (!PathUtils.isAbsolute(targetPath)) { + throw new Exception("Absolute path expected: " + targetPath + ", pos: " + pos); + } + String parentPath = PathUtils.getParentPath(targetPath); + String propName = PathUtils.getName(targetPath); + defaultHandler.propertySet(parentPath, propName, JsonUtil.convertJsonValue(value)); + } + + private void parseOpRemoved() throws Exception { + int pos = tokenizer.getLastPos(); + String subPath = tokenizer.readString(); + String targetPath = PathUtils.concat(path, subPath); + if (!PathUtils.isAbsolute(targetPath)) { + throw new Exception("Absolute path expected: " + targetPath + ", pos: " + pos); + } + defaultHandler.nodeRemoved(path, subPath); + } +} \ No newline at end of file diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/AddNodeInstructionImpl.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/AddNodeInstructionImpl.java new file mode 100644 index 
0000000..f49efaf --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/AddNodeInstructionImpl.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl.model; + +import org.apache.jackrabbit.mongomk.api.model.InstructionVisitor; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddNodeInstruction; +import org.apache.jackrabbit.oak.commons.PathUtils; + + +/** + * Implementation of {@link AddNodeInstruction}. 
+ * + * @author descendants) { + Set children = node.getChildren(); + if (children != null) { + for (Node child : children) { + descendants.add(child); + this.getDescendantsRecursive(child, descendants); + } + } + } +} diff --git a/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/RemoveNodeInstructionImpl.java b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/RemoveNodeInstructionImpl.java new file mode 100644 index 0000000..d6b2706 --- /dev/null +++ b/oak-mongomk-impl/src/main/java/org/apache/jackrabbit/mongomk/impl/model/RemoveNodeInstructionImpl.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.impl.model; + +import org.apache.jackrabbit.mongomk.api.model.InstructionVisitor; +import org.apache.jackrabbit.mongomk.api.model.Instruction.RemoveNodeInstruction; +import org.apache.jackrabbit.oak.commons.PathUtils; + + +/** + * Implementation of {@link RemoveNodeInstruction}. 
+ * + * @author instructions = commit.getInstructions(); + Assert.assertEquals(6, instructions.size()); + InstructionAssert.assertAddNodeInstruction((AddNodeInstruction) instructions.get(0), "/a"); + InstructionAssert.assertAddPropertyInstruction((AddPropertyInstruction) instructions.get(1), "/a", "int", 1); + InstructionAssert.assertAddNodeInstruction((AddNodeInstruction) instructions.get(2), "/a/b"); + InstructionAssert.assertAddPropertyInstruction((AddPropertyInstruction) instructions.get(3), "/a/b", "string", + "foo"); + InstructionAssert.assertAddNodeInstruction((AddNodeInstruction) instructions.get(4), "/a/c"); + InstructionAssert.assertAddPropertyInstruction((AddPropertyInstruction) instructions.get(5), "/a/c", "bool", + true); + } + + @Test + public void testSimpleCopy() throws Exception { + StringBuilder sb = new StringBuilder(); + sb.append("*\"a\" : \"b\"\n"); + sb.append("*\"a/b\" : \"a/c\"\n"); + + Commit commit = this.buildAndAssertCommit(sb.toString()); + List instructions = commit.getInstructions(); + assertEquals(2, instructions.size()); + InstructionAssert.assertCopyNodeInstruction((CopyNodeInstruction) instructions.get(0), "/", "/a", "/b"); + InstructionAssert.assertCopyNodeInstruction((CopyNodeInstruction) instructions.get(1), "/", "/a/b", "/a/c"); + } + + @Test + public void testSimpleMove() throws Exception { + StringBuilder sb = new StringBuilder(); + sb.append(">\"a\" : \"b\"\n"); + sb.append(">\"a/b\" : \"a/c\"\n"); + + Commit commit = this.buildAndAssertCommit(sb.toString()); + List instructions = commit.getInstructions(); + assertEquals(2, instructions.size()); + InstructionAssert.assertMoveNodeInstruction((MoveNodeInstruction) instructions.get(0), "/", "/a", "/b"); + InstructionAssert.assertMoveNodeInstruction((MoveNodeInstruction) instructions.get(1), "/", "/a/b", "/a/c"); + } + + @Test + public void testSimpleRemove() throws Exception { + StringBuilder sb = new StringBuilder(); + sb.append("-\"a\""); + // TODO properties + + Commit 
commit = this.buildAndAssertCommit(sb.toString()); + + List instructions = commit.getInstructions(); + assertEquals(1, instructions.size()); + InstructionAssert.assertRemoveNodeInstruction((RemoveNodeInstruction) instructions.get(0), "/a"); + } + + @Test + public void testSimpleSet() throws Exception { + StringBuilder sb = new StringBuilder(); + sb.append("^\"a\" : \"b\"\n"); + + Commit commit = this.buildAndAssertCommit(sb.toString()); + + List instructions = commit.getInstructions(); + assertEquals(1, instructions.size()); + InstructionAssert.assertSetPropertyInstruction((SetPropertyInstruction) instructions.get(0), "/", "a", "b"); + } + + private Commit buildAndAssertCommit(String commitString) throws Exception { + Commit commit = CommitBuilder.build(ROOT, commitString, MESSAGE); + + assertNotNull(commit); + assertEquals(MESSAGE, commit.getMessage()); + assertNull(commit.getRevisionId()); + return commit; + } +} diff --git a/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/builder/NodeBuilderTest.java b/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/builder/NodeBuilderTest.java new file mode 100644 index 0000000..da63705 --- /dev/null +++ b/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/builder/NodeBuilderTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.builder; + +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.NodeAssert; +import org.apache.jackrabbit.mongomk.impl.builder.NodeBuilder; +import org.apache.jackrabbit.mongomk.impl.model.NodeImpl; +import org.junit.Test; + + +/** + * @author expectedChildren = expected.getChildren(); + Set actualChildren = actual.getChildren(); + + if (expectedChildren == null) { + Assert.assertNull(actualChildren); + } else { + Assert.assertNotNull(actualChildren); + Assert.assertEquals(expectedChildren.size(), actualChildren.size()); + + for (Node expectedChild : expectedChildren) { + boolean valid = false; + for (Node actualChild : actualChildren) { + if (expectedChild.getName().equals(actualChild.getName())) { + assertDeepEquals(expectedChild, actualChild); + valid = true; + + break; + } + } + + Assert.assertTrue(valid); + } + } + } + + public static void assertEquals(Collection expecteds, Collection actuals) { + Assert.assertEquals(expecteds.size(), actuals.size()); + + for (Node expected : expecteds) { + boolean valid = false; + for (Node actual : actuals) { + if (expected.getPath().equals(actual.getPath())) { + assertEquals(expected, actual); + valid = true; + + break; + } + } + + Assert.assertTrue(valid); + } + } + + public static void assertEquals(Node expected, Node actual) { + Assert.assertEquals(expected.getName(), actual.getName()); + Assert.assertEquals(expected.getPath(), actual.getPath()); + + String expectedRevisionId = expected.getRevisionId(); + String actualRevisionId = actual.getRevisionId(); + + if (expectedRevisionId == null) { + Assert.assertNull(actualRevisionId); + } + if (actualRevisionId == null) { + Assert.assertNull(expectedRevisionId); + } + + if ((actualRevisionId != null) && (expectedRevisionId != null)) { + Assert.assertEquals(expectedRevisionId, 
actualRevisionId); + } + + Map expectedProperties = expected.getProperties(); + Map actualProperties = actual.getProperties(); + + if (expectedProperties == null) { + Assert.assertNull(actualProperties); + } + + if (actualProperties == null) { + Assert.assertNull(expectedProperties); + } + + if ((actualProperties != null) && (expectedProperties != null)) { + Assert.assertEquals(expectedProperties, actualProperties); + } + } + + private NodeAssert() { + // no instantiation + } +} diff --git a/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/json/JsopParserTest.java b/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/json/JsopParserTest.java new file mode 100644 index 0000000..5bd5504 --- /dev/null +++ b/oak-mongomk-impl/src/test/java/org/apache/jackrabbit/mongomk/impl/json/JsopParserTest.java @@ -0,0 +1,543 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.impl.json; + +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; + +import org.apache.jackrabbit.mongomk.impl.json.DefaultJsopHandler; +import org.apache.jackrabbit.mongomk.impl.json.JsopParser; +import org.junit.Assert; +import org.junit.Test; + +/** + * @author nodesAdded; + private final List nodesCopied; + private final List nodesMoved; + private final List nodesRemoved; + private final List propertiesAdded; + private final List propertiesSet; + + CountingHandler() { + this.nodesAdded = new LinkedList(); + this.nodesCopied = new LinkedList(); + this.nodesMoved = new LinkedList(); + this.nodesRemoved = new LinkedList(); + this.propertiesAdded = new LinkedList(); + this.propertiesSet = new LinkedList(); + } + + public void assertNodeCopied(String parentPath, String oldPath, String newPath) { + NodeMoved expected = new NodeMoved(parentPath, oldPath, newPath); + + int firstIndex = this.nodesCopied.indexOf(expected); + int lastIndex = this.nodesCopied.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + public void assertNodeMoved(String parentPath, String oldPath, String newPath) { + NodeMoved expected = new NodeMoved(parentPath, oldPath, newPath); + + int firstIndex = this.nodesMoved.indexOf(expected); + int lastIndex = this.nodesMoved.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + public void assertNodeRemoved(String path, String name) { + Node expected = new Node(path, name); + + int firstIndex = this.nodesRemoved.indexOf(expected); + int lastIndex = this.nodesRemoved.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + public void assertNoOfNodesCopied(int num) { + Assert.assertEquals(num, this.nodesCopied.size()); + } + + public void assertNoOfNodesMoved(int num) { + Assert.assertEquals(num, 
this.nodesMoved.size()); + } + + public void assertNoOfNodesRemoved(int num) { + Assert.assertEquals(num, this.nodesRemoved.size()); + } + + public void assertNoOfPropertiesSet(int num) { + Assert.assertEquals(num, this.propertiesSet.size()); + } + + public void assertPropertiesAdded(int num) { + Assert.assertEquals(num, this.propertiesAdded.size()); + } + + @Override + public void nodeAdded(String path, String name) { + this.nodesAdded.add(new Node(path, name)); + } + + @Override + public void nodeCopied(String rootPath, String oldPath, String newPath) { + this.nodesCopied.add(new NodeMoved(rootPath, oldPath, newPath)); + } + + @Override + public void nodeMoved(String rootPath, String oldPath, String newPath) { + this.nodesMoved.add(new NodeMoved(rootPath, oldPath, newPath)); + } + + @Override + public void nodeRemoved(String path, String name) { + this.nodesRemoved.add(new Node(path, name)); + } + + @Override + public void propertyAdded(String path, String key, Object value) { + this.propertiesAdded.add(new Property(path, key, value)); + } + + @Override + public void propertySet(String path, String key, Object value) { + this.propertiesSet.add(new Property(path, key, value)); + } + + void assertNodeAdded(String path, String name) { + Node expected = new Node(path, name); + + int firstIndex = this.nodesAdded.indexOf(expected); + int lastIndex = this.nodesAdded.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + void assertPropertyAdded(String path, String key, Object value) { + Property expected = new Property(path, key, value); + + int firstIndex = this.propertiesAdded.indexOf(expected); + int lastIndex = this.propertiesAdded.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + void assertPropertySet(String path, String key, Object value) { + Property expected = new Property(path, key, value); + + int firstIndex = 
this.propertiesSet.indexOf(expected); + int lastIndex = this.propertiesSet.lastIndexOf(expected); + + Assert.assertTrue(firstIndex != -1); + Assert.assertEquals(firstIndex, lastIndex); + } + + void assetNoOfNodesAdded(int num) { + Assert.assertEquals(num, this.nodesAdded.size()); + } + + } + + @Test + public void testAddNestedNodes() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("+\"a\" : { \"integer\" : 123 ,\"b\" : { \"double\" : 123.456 , \"d\" : {} } , \"c\" : { \"string\" : \"string\" }}"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assetNoOfNodesAdded(4); + countingHandler.assertNodeAdded("/", "a"); + countingHandler.assertNodeAdded("/a", "b"); + countingHandler.assertNodeAdded("/a/b", "d"); + countingHandler.assertNodeAdded("/a", "c"); + + countingHandler.assertPropertiesAdded(3); + countingHandler.assertPropertyAdded("/a", "integer", 123); + countingHandler.assertPropertyAdded("/a/b", "double", 123.456); + countingHandler.assertPropertyAdded("/a/c", "string", "string"); + } + + @Test + public void testAddNodesAndProperties() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("+\"a\" : { \"int\" : 1 } \n"); + sb.append("+\"a/b\" : { \"string\" : \"foo\" } \n"); + sb.append("+\"a/c\" : { \"bool\" : true }"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assetNoOfNodesAdded(3); + countingHandler.assertNodeAdded("/", "a"); + countingHandler.assertNodeAdded("/a", "b"); + countingHandler.assertNodeAdded("/a", "c"); + + countingHandler.assertPropertiesAdded(3); + countingHandler.assertPropertyAdded("/a", "int", Integer.valueOf(1)); + countingHandler.assertPropertyAdded("/a/b", 
"string", "foo"); + countingHandler.assertPropertyAdded("/a/c", "bool", Boolean.TRUE); + } + + @Test + public void testAddNodesAndPropertiesSeparately() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("+\"a\" : {} \n"); + sb.append("+\"a\" : { \"int\" : 1 } \n"); + sb.append("+\"a/b\" : {} \n"); + sb.append("+\"a/b\" : { \"string\" : \"foo\" } \n"); + sb.append("+\"a/c\" : {} \n"); + sb.append("+\"a/c\" : { \"bool\" : true }"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assetNoOfNodesAdded(6); + + countingHandler.assertPropertiesAdded(3); + countingHandler.assertPropertyAdded("/a", "int", Integer.valueOf(1)); + countingHandler.assertPropertyAdded("/a/b", "string", "foo"); + countingHandler.assertPropertyAdded("/a/c", "bool", Boolean.TRUE); + } + + @Test + public void testAddPropertiesWithComplexArray() throws Exception { + String rootPath = "/"; + String jsop = "+ \"a\" : { \"array_complex\" : [ 123, 123.456, true, false, null, \"string\", [1,2,3,4,5] ] }"; + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, jsop, countingHandler); + + jsopParser.parse(); + + countingHandler.assertPropertiesAdded(1); + countingHandler.assertPropertyAdded( + "/a", + "array_complex", + Arrays.asList(new Object[] { 123, 123.456, true, false, null, "string", + Arrays.asList(new Object[] { 1, 2, 3, 4, 5 }) })); + } + + @Test + public void testAddWithEmptyPath() throws Exception { + String rootPath = ""; + StringBuilder sb = new StringBuilder(); + sb.append("+\"/\" : { \"int\" : 1 } \n"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assetNoOfNodesAdded(1); + countingHandler.assertNodeAdded("", 
"/"); + + countingHandler.assertPropertiesAdded(1); + countingHandler.assertPropertyAdded("/", "int", Integer.valueOf(1)); + } + + @Test + public void testSimpleAddNodes() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("+\"a\" : {} \n"); + sb.append("+\"a/b\" : {} \n"); + sb.append("+\"a/c\" : {}"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assetNoOfNodesAdded(3); + countingHandler.assertNodeAdded("/", "a"); + countingHandler.assertNodeAdded("/a", "b"); + countingHandler.assertNodeAdded("/a", "c"); + } + + @Test + public void testSimpleAddProperties() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("+ \"a\" : {}"); + sb.append("+ \"a\" : { \"integer\" : 123, \"double\" : 123.456, \"true\" : true, \"false\" : false, \"null\" : null, \"string\" : \"string\", \"array\" : [1,2,3,4,5] }"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + + jsopParser.parse(); + + countingHandler.assertPropertiesAdded(7); + countingHandler.assertPropertyAdded("/a", "integer", 123); + countingHandler.assertPropertyAdded("/a", "double", 123.456); + countingHandler.assertPropertyAdded("/a", "true", true); + countingHandler.assertPropertyAdded("/a", "false", false); + countingHandler.assertPropertyAdded("/a", "null", null); + countingHandler.assertPropertyAdded("/a", "string", "string"); + countingHandler.assertPropertyAdded("/a", "array", Arrays.asList(new Object[] { 1, 2, 3, 4, 5 })); + } + + @Test + public void testSimpleCopyNodes() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("*\"a\" : \"b\"\n"); + sb.append("*\"a/b\" : \"a/c\"\n"); + + CountingHandler countingHandler = new CountingHandler(); + 
JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + jsopParser.parse(); + + countingHandler.assertNoOfNodesCopied(2); + countingHandler.assertNodeCopied("/", "/a", "/b"); + countingHandler.assertNodeCopied("/", "/a/b", "/a/c"); + } + + @Test + public void testSimpleMoveNodes() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append(">\"a\" : \"b\"\n"); + sb.append(">\"a/b\" : \"a/c\"\n"); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + jsopParser.parse(); + + countingHandler.assertNoOfNodesMoved(2); + countingHandler.assertNodeMoved("/", "/a", "/b"); + countingHandler.assertNodeMoved("/", "/a/b", "/a/c"); + } + + @Test + public void testSimpleRemoveNodes() throws Exception { + String rootPath = "/"; + String jsop = "-\"a\""; + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, jsop, countingHandler); + + jsopParser.parse(); + + countingHandler.assertNoOfNodesRemoved(1); + countingHandler.assertNodeRemoved("/", "a"); + } + + @Test + public void testSimpleSetNodes() throws Exception { + String rootPath = "/"; + StringBuilder sb = new StringBuilder(); + sb.append("^\"a\" : \"b\""); + + CountingHandler countingHandler = new CountingHandler(); + JsopParser jsopParser = new JsopParser(rootPath, sb.toString(), countingHandler); + jsopParser.parse(); + + countingHandler.assertNoOfPropertiesSet(1); + // TODO - Is this correct? 
+ countingHandler.assertPropertySet("/", "a", "b"); + } +} diff --git a/oak-mongomk-perf/pom.xml b/oak-mongomk-perf/pom.xml new file mode 100644 index 0000000..f37f61f --- /dev/null +++ b/oak-mongomk-perf/pom.xml @@ -0,0 +1,98 @@ + + + + + + 4.0.0 + + + org.apache.jackrabbit + oak-parent + 0.5-SNAPSHOT + + + oak-mongomk-perf + + + + org.apache.jackrabbit + oak-mongomk-api + ${project.version} + + + org.apache.jackrabbit + oak-mongomk-impl + ${project.version} + + + org.apache.jackrabbit + oak-mongomk + ${project.version} + + + log4j + log4j + 1.2.16 + + + commons-cli + commons-cli + 1.2 + + + com.jamonapi + jamon + 2.4 + + + junit + junit + + + + + + + maven-assembly-plugin + + + jar-with-dependencies + + + + org.apache.jackrabbit.mongomk.perf.MicroKernelPerf + + + + + + make-assembly + package + + single + + + + + + + + diff --git a/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/BlobStoreFS.java b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/BlobStoreFS.java new file mode 100644 index 0000000..c7dae74 --- /dev/null +++ b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/BlobStoreFS.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.perf; + +import java.io.File; +import java.io.InputStream; + +import org.apache.jackrabbit.mongomk.api.BlobStore; + + +/** + * @author addedNodes = new LinkedList(); + private final Map> addedProperties = new HashMap>(); + + @Override + public void nodeAdded(String parentPath, String name) { + addedNodes.add(PathUtils.concat(parentPath, name)); + } + + @Override + public void propertyAdded(String path, String key, Object value) { + List properties = addedProperties.get(path); + if (properties == null) { + properties = new LinkedList(); + addedProperties.put(path, properties); + } + properties.add(key); + } + } + + private static final Logger LOG = Logger.getLogger(MicroKernelPerfClient.class); + + private static final Logger PERF = Logger.getLogger("PERFORMANCE"); + + private static void assertNodeExists(String path, String node, JSONObject result) throws Exception { + if (!path.equals(node)) { + JSONObject temp = result; + for (String segment : PathUtils.elements(node)) { + temp = temp.getJSONObject(segment); + + if (temp == null) { + throw new Exception(String.format("The node %s could not be found!", node)); + } + } + } + } + + private static void assertPropertyExists(String path, String property, JSONObject result) throws Exception { + JSONObject temp = result; + for (String segment : PathUtils.elements(path)) { + temp = temp.optJSONObject(segment); + + if (temp == null) { + throw new Exception(String.format("The node %s could not be found!", path)); + } + } + + Object value = temp.opt(property); + if (value == null) { + throw new Exception(String.format("The node %s did not containt the property %s!", path, property)); + } + } + + private Monitor commitMonitor; + private final Config config; + private Monitor getNodesMonitor; + + private MongoMicroKernel microKernel; + + private MongoConnection mongoConnection; + + private RandomJsopGenerator randomJsopGenerator; + + public MicroKernelPerfClient(Config config) throws 
Exception { + this.config = config; + + initMongo(); + initMicroKernel(); + initRandomJsopGenerator(); + initMonitoring(); + } + + public void start() throws Exception { + LOG.info("Starting client..."); + + startCommitting(); + } + + private void createStats(VerificationHandler handler, JSONObject result) { + long numOfNodes = mongoConnection.getNodeCollection().count(); + long numOfCommits = mongoConnection.getCommitCollection().count(); + + Stats commitStats = new Stats("commit", commitMonitor.getLastValue(), numOfCommits, numOfNodes, + handler.addedNodes.size() + handler.addedProperties.size()); + + Stats getNodesStats = new Stats("getNodes", getNodesMonitor.getLastValue(), numOfCommits, numOfNodes, + numOfNodes - handler.addedNodes.size()); + + DBCollection statsCollection = mongoConnection.getDB().getCollection("statistics"); + statsCollection.insert(new Stats[] { commitStats, getNodesStats }, WriteConcern.NONE); + } + + private void initMicroKernel() throws Exception { + NodeStore nodeStore = new NodeStoreMongo(mongoConnection); + BlobStore blobStore = new BlobStoreFS(System.getProperty("java.io.tmpdir")); + + microKernel = new MongoMicroKernel(nodeStore, blobStore); + } + + private void initMongo() throws Exception { + mongoConnection = new MongoConnection(config.getMongoHost(), config.getMongoPort(), config.getMongoDatabase()); + } + + private void initMonitoring() { + commitMonitor = MonitorFactory.getTimeMonitor("commit"); + getNodesMonitor = MonitorFactory.getTimeMonitor("getNodes"); + } + + private void initRandomJsopGenerator() throws Exception { + randomJsopGenerator = new RandomJsopGenerator(); + } + + private void startCommitting() throws Exception { + while (true) { + RandomJsop randomJsop = randomJsopGenerator.nextRandom(); + + String commitPath = randomJsop.getPath(); + String jsonDiff = randomJsop.getJsop(); + String revisionId = null; + String message = randomJsop.getMessage(); + + commitMonitor.start(); + String newRevisionId = 
microKernel.commit(commitPath, jsonDiff, revisionId, message); + commitMonitor.stop(); + PERF.info(commitMonitor); + LOG.debug(String.format("Committed (%s): %s, %s\n%s", newRevisionId, commitPath, message, jsonDiff)); + + getNodesMonitor.start(); + String getPath = "".equals(commitPath) ? "/" : commitPath; + String json = microKernel.getNodes(getPath, newRevisionId, -1, 0, -1, null); + getNodesMonitor.stop(); + PERF.info(getNodesMonitor); + LOG.debug(String.format("GetNodes (%s: %s", newRevisionId, json)); + + VerificationHandler handler = new VerificationHandler(); + JsopParser jsopParser = new JsopParser(commitPath, jsonDiff, handler); + jsopParser.parse(); + + JSONObject result = new JSONObject(json); + + verify(handler, result, getPath); + createStats(handler, result); + + randomJsopGenerator.setSeed(getPath, json); + } + } + + private void verify(VerificationHandler handler, JSONObject result, String getPath) throws Exception { + for (String node : handler.addedNodes) { + assertNodeExists(getPath, node, result); + } + + for (Map.Entry> entry : handler.addedProperties.entrySet()) { + String path = entry.getKey(); + List properties = entry.getValue(); + for (String property : properties) { + assertPropertyExists(path, property, result); + } + } + } +} diff --git a/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerfMaster.java b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerfMaster.java new file mode 100644 index 0000000..011da32 --- /dev/null +++ b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/MicroKernelPerfMaster.java @@ -0,0 +1,257 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.perf; + +import java.util.LinkedList; +import java.util.List; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.NodeStoreMongo; +import org.apache.jackrabbit.mongomk.api.BlobStore; +import org.apache.jackrabbit.mongomk.api.NodeStore; +import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel; +import org.apache.jackrabbit.mongomk.impl.json.DefaultJsopHandler; +import org.apache.jackrabbit.mongomk.impl.json.JsopParser; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.HeadMongo; +import org.apache.jackrabbit.oak.commons.PathUtils; +import org.apache.log4j.Logger; +import org.json.JSONArray; +import org.json.JSONObject; + +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; + +/** + * @author commitMongos = this.waitForCommit(); + for (CommitMongo commitMongo : commitMongos) { + if (commitMongo.hasFailed()) { + LOG.info(String.format("Skipping commit %d because it failed", commitMongo.getRevisionId())); + this.lastRevId = commitMongo.getRevisionId(); + } else { + LOG.info(String.format("Verifying commit %d", commitMongo.getRevisionId())); + this.verifyCommit(commitMongo); + this.verifyCommitOrder(commitMongo); + this.lastRevId = commitMongo.getRevisionId(); + this.lastCommitRevId 
= commitMongo.getRevisionId(); + } + } + } + } + + private void verifyCommit(CommitMongo commitMongo) throws Exception { + String path = commitMongo.getPath(); + String jsop = commitMongo.getDiff(); + + JsopParser jsopParser = new JsopParser(path, jsop, this.handler); + jsopParser.parse(); + + String json = this.microKernel.getNodes("/", String.valueOf(commitMongo.getRevisionId()), -1, 0, -1, null); + JSONObject resultJson = new JSONObject(json); + + this.verifyEquality(this.handler.jsonObject, resultJson); + + LOG.info(String.format("Successfully verified commit %d", commitMongo.getRevisionId())); + } + + private void verifyCommitOrder(CommitMongo commitMongo) throws Exception { + long baseRevId = commitMongo.getBaseRevisionId(); + long revId = commitMongo.getRevisionId(); + if (baseRevId != this.lastCommitRevId) { + throw new Exception(String.format( + "Revision %d has a base revision of %d but last successful commit was %d", revId, baseRevId, + this.lastCommitRevId)); + } + } + + private void verifyEquality(JSONObject expected, JSONObject actual) throws Exception { + LOG.debug(String.format("Verifying for equality %s (expected) vs %s (actual)", expected, actual)); + + try { + if (expected.length() != (actual.length() - 1)) { // substract 1 b/c of :childCount + throw new Exception(String.format( + "Unequal number of children/properties: %d (expected) vs %d (actual)", expected.length(), + actual.length() - 1)); + } + + JSONArray expectedNames = expected.names(); + if (expectedNames != null) { + for (int i = 0; i < expectedNames.length(); ++i) { + String name = expectedNames.getString(i); + + Object expectedValue = expected.get(name); + Object actualValue = actual.get(name); + + if ((expectedValue instanceof JSONObject) && (actualValue instanceof JSONObject)) { + this.verifyEquality((JSONObject) expectedValue, (JSONObject) actualValue); + } else if ((expectedValue != null) && (actualValue != null)) { + if (!expectedValue.equals(actualValue)) { + throw new 
Exception(String.format( + "Key %s: Expected value '%s' does not macht actual value '%s'", name, + expectedValue, actualValue)); + } + } else if (expectedValue != null) { + throw new Exception(String.format( + "Key %s: Did not find an actual value for expected value '%s'", name, expectedValue)); + } else if (actualValue != null) { + throw new Exception(String.format( + "Key %s: Did not find an expected value for actual value '%s'", name, actualValue)); + } + } + } + } catch (Exception e) { + LOG.error( + String.format("Verificytion for equality failed: %s (expected) vs %s (actual)", expected, actual), + e); + throw e; + } + } + + private List waitForCommit() { + // TODO Change this to MicroKernel#waitForCommit + List commitMongos = new LinkedList(); + this.lastHeadRevId = 0L; + + while (true) { + LOG.debug("Waiting for commit..."); + + DBCollection headCollection = this.mongoConnection.getHeadCollection(); + HeadMongo headMongo = (HeadMongo) headCollection.findOne(); + if (this.lastHeadRevId < headMongo.getHeadRevisionId()) { + DBCollection commitCollection = this.mongoConnection.getCommitCollection(); + DBObject query = QueryBuilder.start(CommitMongo.KEY_REVISION_ID).greaterThan(this.lastRevId) + .and(CommitMongo.KEY_REVISION_ID).lessThanEquals(headMongo.getHeadRevisionId()).get(); + DBObject sort = QueryBuilder.start(CommitMongo.KEY_REVISION_ID).is(1).get(); + DBCursor dbCursor = commitCollection.find(query).sort(sort); + while (dbCursor.hasNext()) { + commitMongos.add((CommitMongo) dbCursor.next()); + } + + if (commitMongos.size() > 0) { + LOG.debug(String.format("Found %d new commits", commitMongos.size())); + + break; + } + this.lastHeadRevId = headMongo.getHeadRevisionId(); + } + try { + Thread.sleep(2000); + } catch (InterruptedException e) { + // noop + } + } + + return commitMongos; + } +} diff --git a/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/PrepareEnvironment.java 
b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/PrepareEnvironment.java new file mode 100644 index 0000000..6640600 --- /dev/null +++ b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/perf/PrepareEnvironment.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.perf; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.log4j.Logger; + + +/** + * @author = nodesNumber) + return; + createNode(mk, parentFolderName, nodePrefixName + index); + for (int i = 1; i <= numberOfChildren; i++) { + if (!parentFolderName.endsWith("/")) + parentFolderName = parentFolderName + "/"; + insertNode(mk, parentFolderName + nodePrefixName + index, index + * numberOfChildren + i, numberOfChildren, nodesNumber, nodePrefixName); + } + + } + + /** + * Creates a new node. 
+ * + * @param mk + * @param parentNode + * @param name + * @return + */ + public static String createNode(MicroKernel mk, String parentNode, String name) { + + return mk.commit(parentNode, "+\"" + name + "\" : {} \n", null, ""); + + } +} diff --git a/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/WriteNodesTest.java b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/WriteNodesTest.java new file mode 100644 index 0000000..64e5ec7 --- /dev/null +++ b/oak-mongomk-perf/src/main/java/org/apache/jackrabbit/mongomk/performance/write/WriteNodesTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.performance.write; + +import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Measures the time needed for creating different tree node structures.Only one + * mongoMk is used for writing operation. 
+ * + * @author rogoz + * + */ +public class WriteNodesTest extends MultipleNodesTestBase { + static MicroKernel mk; + + @BeforeClass + public static void init() throws Exception { + readConfig(); + initMongo(); + mk=initMicroKernel(); + } + + @Before + public void cleanDatabase() { + MongoUtil.initDatabase(mongoConnection); + } + + /** + * Creates 10000 nodes, all with on the same level with the same parent + * node. + */ + @Test + public void addNodesInLine() { + int nodesNumber = 10000; + TestUtil.insertNode(mk, "/", 0, 0, nodesNumber, "N"); + } + + /** + * Creates 10000 nodes, all of them having 10 children nodes. + */ + @Test + public void addNodes10Children() { + int nodesNumber = 10000; + TestUtil.insertNode(mk, "/", 0, 10, nodesNumber, "N"); + } + + /** + * Creates 10000 nodes, all of them having 100 children nodes. + */ + @Test + public void addNodes100Children() { + int nodesNumber = 10000; + TestUtil.insertNode(mk, "/", 0, 100, nodesNumber, "N"); + } + + /** + * Creates 10000 nodes, all of them on different levels.Each node has one + * child only. + */ + @Test + public void addNodes1Child() { + int nodesNumber = 2000; + TestUtil.insertNode(mk, "/", 0, 1, nodesNumber,"N"); + } + +} diff --git a/oak-mongomk-perf/src/main/resources/config.cfg b/oak-mongomk-perf/src/main/resources/config.cfg new file mode 100644 index 0000000..25858a1 --- /dev/null +++ b/oak-mongomk-perf/src/main/resources/config.cfg @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +master.host = localhost +master.port = 12345 + +mongo.host = localhost +mongo.port = 27017 +mongo.db = mk-tf \ No newline at end of file diff --git a/oak-mongomk-perf/src/main/resources/log4j.cfg b/oak-mongomk-perf/src/main/resources/log4j.cfg new file mode 100644 index 0000000..015e4dd --- /dev/null +++ b/oak-mongomk-perf/src/main/resources/log4j.cfg @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +log4j.rootLogger=INFO, console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=[%t] %-5p %c: %m%n + +log4j.logger.PERFORMANCE=INFO \ No newline at end of file diff --git a/oak-mongomk-test/pom.xml b/oak-mongomk-test/pom.xml new file mode 100644 index 0000000..79a0d71 --- /dev/null +++ b/oak-mongomk-test/pom.xml @@ -0,0 +1,69 @@ + + + + + + 4.0.0 + + + org.apache.jackrabbit + oak-parent + 0.5-SNAPSHOT + + + oak-mongomk-test + + + + org.apache.jackrabbit + oak-mongomk-api + ${project.version} + + + org.apache.jackrabbit + oak-mongomk-impl + ${project.version} + + + org.apache.jackrabbit + oak-mongomk + ${project.version} + test + + + org.json + json + 20090211 + + + junit + junit + test + + + org.apache.jackrabbit + oak-it-mk + ${project.version} + test + + + + diff --git a/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoDataStoreIT.java b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoDataStoreIT.java new file mode 100644 index 0000000..834875f --- /dev/null +++ b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoDataStoreIT.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.test.it; + +import org.apache.jackrabbit.mk.test.DataStoreIT; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DataStoreIT.class, +}) +public class MongoDataStoreIT { +} diff --git a/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoEverythingIT.java b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoEverythingIT.java new file mode 100644 index 0000000..35e3373 --- /dev/null +++ b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoEverythingIT.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.test.it; + +import org.apache.jackrabbit.mk.test.MicroKernelTestSuite; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +@RunWith(Suite.class) +@Suite.SuiteClasses({ + MicroKernelTestSuite.class +}) +public class MongoEverythingIT { +} diff --git a/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java new file mode 100644 index 0000000..0d3b011 --- /dev/null +++ b/oak-mongomk-test/src/test/java/org/apache/jackrabbit/mongomk/test/it/MongoMicroKernelFixture.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.test.it; + +import java.io.InputStream; +import java.util.Properties; + +import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mk.test.MicroKernelFixture; +import org.apache.jackrabbit.mongomk.BlobStoreMongo; +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.NodeStoreMongo; +import org.apache.jackrabbit.mongomk.api.BlobStore; +import org.apache.jackrabbit.mongomk.api.NodeStore; +import org.apache.jackrabbit.mongomk.impl.MongoMicroKernel; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.junit.Assert; + + +/** + * @author + + + + + 4.0.0 + + + org.apache.jackrabbit + oak-parent + 0.5-SNAPSHOT + + + oak-mongomk + + + + commons-io + commons-io + 2.3 + + + commons-codec + commons-codec + 1.6 + + + junit + junit + test + + + org.easymock + easymock + 3.1 + test + + + log4j + log4j + 1.2.16 + + + org.apache.jackrabbit + oak-mongomk-api + ${project.version} + + + org.apache.jackrabbit + oak-mongomk-impl + ${project.version} + + + org.mongodb + mongo-java-driver + 2.7.3 + + + org.apache.jackrabbit + oak-commons + 0.5-SNAPSHOT + + + org.apache.jackrabbit + oak-mongomk-impl + ${project.version} + test-jar + test + + + + diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/BlobStoreMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/BlobStoreMongo.java new file mode 100644 index 0000000..e821133 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/BlobStoreMongo.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk; + +import java.io.InputStream; + +import org.apache.jackrabbit.mongomk.api.BlobStore; +import org.apache.jackrabbit.mongomk.api.command.Command; +import org.apache.jackrabbit.mongomk.api.command.CommandExecutor; +import org.apache.jackrabbit.mongomk.command.GetBlobLengthCommandMongo; +import org.apache.jackrabbit.mongomk.command.ReadBlobCommandMongo; +import org.apache.jackrabbit.mongomk.command.WriteBlobCommandMongo; +import org.apache.jackrabbit.mongomk.impl.command.CommandExecutorImpl; + +public class BlobStoreMongo implements BlobStore { + + private final MongoConnection mongoConnection; + private final CommandExecutor commandExecutor; + + public BlobStoreMongo(MongoConnection mongoConnection) { + this.mongoConnection = mongoConnection; + this.commandExecutor = new CommandExecutorImpl(); + } + + @Override + public long getBlobLength(String blobId) throws Exception { + Command command = new GetBlobLengthCommandMongo(mongoConnection, blobId); + return commandExecutor.execute(command); + } + + @Override + public int readBlob(String blobId, long blobOffset, byte[] buffer, int bufferOffset, int length) throws Exception { + Command command = new ReadBlobCommandMongo(mongoConnection, blobId, blobOffset, buffer, bufferOffset, length); + return commandExecutor.execute(command); + } + + @Override + public String writeBlob(InputStream is) throws Exception { + Command command = new WriteBlobCommandMongo(mongoConnection, is); + return commandExecutor.execute(command); + } +} diff --git 
a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoConnection.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoConnection.java new file mode 100644 index 0000000..264f610 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/MongoConnection.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk; + +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.HeadMongo; +import org.apache.jackrabbit.mongomk.model.NodeMongo; + +import com.mongodb.DB; +import com.mongodb.DBCollection; +import com.mongodb.Mongo; +import com.mongodb.gridfs.GridFS; + +/** + * The {@code MongoConnection} contains connection properties for the {@code MongoDB}. 
+ * + * @author commits = new FetchValidCommitsQuery(mongoConnection, + fromRevisionId, toRevisionId).execute(); + + CommitMongo toCommit = getCommit(commits, toRevisionId); + + CommitMongo fromCommit; + if (toRevisionId.equals(fromRevisionId)) { + fromCommit = toCommit; + } else { + fromCommit = getCommit(commits, fromRevisionId); + if (fromCommit == null || (fromCommit.getTimestamp() > toCommit.getTimestamp())) { + // negative range, return empty journal + return "[]"; + } + } + + JsopBuilder commitBuff = new JsopBuilder().array(); + // iterate over commits in chronological order, + // starting with oldest commit + for (int i = commits.size() - 1; i >= 0; i--) { + CommitMongo commit = commits.get(i); + //if (commit.getParentId() == null) { + // continue; + //} + String diff = commit.getDiff(); + // FIXME Check that filter really works. + if (!filtered || commit.getAffectedPaths().contains(path)) { + commitBuff.object() + .key("id").value(String.valueOf(commit.getRevisionId())) + .key("ts").value(commit.getTimestamp()) + .key("msg").value(commit.getMessage()) + .key("changes").value(diff).endObject(); + } + } + return commitBuff.endArray().toString(); + } + + private CommitMongo getCommit(List commits, String toRevisionId) { + for (CommitMongo commit : commits) { + if (String.valueOf(commit.getRevisionId()).equals(toRevisionId)) { + return commit; + } + } + return null; + } + + @Override + public String getRevisionHistory(long since, int maxEntries, String path) { + path = (path == null || "".equals(path)) ? "/" : path; + boolean filtered = !"/".equals(path); + maxEntries = maxEntries < 0 ? Integer.MAX_VALUE : maxEntries; + + List history = new FetchValidCommitsQuery(mongoConnection, maxEntries).execute(); + JsopBuilder buff = new JsopBuilder().array(); + for (int i = history.size() - 1; i >= 0; i--) { + CommitMongo commit = history.get(i); + if (commit.getTimestamp() >= since) { + // FIXME [Mete] Check that filter really works. 
+ if (!filtered || commit.getAffectedPaths().contains(path)) { + buff.object() + .key("id").value(String.valueOf(commit.getRevisionId())) + .key("ts").value(commit.getTimestamp()) + .key("msg").value(commit.getMessage()) + .endObject(); + } + } + } + + return buff.endArray().toString(); + } + + @Override + public String waitForCommit(String oldHeadRevisionId, long timeout) throws InterruptedException { + long startTimestamp = System.currentTimeMillis(); + long initialHeadRevisionId = getHeadRevisionId(); + + if (timeout <= 0) { + return String.valueOf(initialHeadRevisionId); + } + + long oldHeadRevision = MongoUtil.toMongoRepresentation(oldHeadRevisionId); + if (oldHeadRevision < initialHeadRevisionId) { + return String.valueOf(initialHeadRevisionId); + } + + long waitForCommitPollMillis = Math.min(WAIT_FOR_COMMIT_POLL_MILLIS, timeout); + while (true) { + long headRevisionId = getHeadRevisionId(); + long now = System.currentTimeMillis(); + if (headRevisionId != initialHeadRevisionId || now - startTimestamp >= timeout) { + return String.valueOf(headRevisionId); + } + Thread.sleep(waitForCommitPollMillis); + } + } + + private long getHeadRevisionId() { + DBCollection headCollection = mongoConnection.getHeadCollection(); + HeadMongo headMongo = (HeadMongo)headCollection.findOne(); + long headRevisionId = headMongo.getHeadRevisionId(); + return headRevisionId; + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongo.java new file mode 100644 index 0000000..2ebad30 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongo.java @@ -0,0 +1,289 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.command.AbstractCommand; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.model.CommitCommandInstructionVisitor; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.HeadMongo; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.query.FetchNodesForRevisionQuery; +import org.apache.jackrabbit.mongomk.query.ReadAndIncHeadRevisionQuery; +import org.apache.jackrabbit.mongomk.query.SaveAndSetHeadRevisionQuery; +import org.apache.jackrabbit.mongomk.query.SaveCommitQuery; +import org.apache.jackrabbit.mongomk.query.SaveNodesQuery; +import org.apache.log4j.Logger; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBCollection; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; +import com.mongodb.WriteResult; + +/** + * A {@code Command} for committing into {@code MongoDB}. 
+ * + * @author (affectedPaths)); + commitMongo.setBaseRevId(headMongo.getHeadRevisionId()); + } + + private void createMongoNodes() throws Exception { + CommitCommandInstructionVisitor visitor = new CommitCommandInstructionVisitor(mongoConnection, + headMongo.getHeadRevisionId()); + for (Instruction instruction : commit.getInstructions()) { + instruction.accept(visitor); + } + + Map pathNodeMap = visitor.getPathNodeMap(); + + affectedPaths = pathNodeMap.keySet(); // TODO Original copies and moved nodes must be included! + nodeMongos = new HashSet(pathNodeMap.values()); + for (NodeMongo nodeMongo : nodeMongos) { + nodeMongo.setRevisionId(revisionId); + } + } + + private void createRevision() { + revisionId = String.valueOf(headMongo.getNextRevisionId() - 1); + } + + private void markAsFailed() throws Exception { + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject query = QueryBuilder.start("_id").is(commitMongo.getObjectId("_id")).get(); + DBObject update = new BasicDBObject("$set", new BasicDBObject(CommitMongo.KEY_FAILED, Boolean.TRUE)); + WriteResult writeResult = commitCollection.update(query, update); + if (writeResult.getError() != null) { + throw new Exception(String.format("Update wasn't successful: %s", writeResult)); // TODO now what? 
+ } + } + + private void mergeNodes() { + for (NodeMongo existingNode : existingNodes) { + for (NodeMongo committingNode : nodeMongos) { + if (existingNode.getPath().equals(committingNode.getPath())) { + logger.debug(String.format("Found existing node to merge: %s", existingNode.getPath())); + logger.debug(String.format("Existing node: %s", existingNode)); + logger.debug(String.format("Committing node: %s", committingNode)); + + Map existingProperties = existingNode.getProperties(); + + if (existingProperties != null) { + committingNode.setProperties(existingProperties); + + logger.debug(String.format("Merged properties for %s: %s", existingNode.getPath(), + existingProperties)); + } + + List existingChildren = existingNode.getChildren(); + + if (existingChildren != null) { + committingNode.setChildren(existingChildren); + + logger.debug(String.format("Merged children for %s: %s", existingNode.getPath(), existingChildren)); + } + + committingNode.setBaseRevisionId(existingNode.getRevisionId()); + + logger.debug(String.format("Merged node for %s: %s", existingNode.getPath(), committingNode)); + + break; + } + } + } + } + + private void prepareMongoNodes() { + for (NodeMongo committingNode : nodeMongos) { + logger.debug(String.format("Preparing children (added and removed) of %s", committingNode.getPath())); + logger.debug(String.format("Committing node: %s", committingNode)); + + List children = committingNode.getChildren(); + if (children == null) { + children = new LinkedList(); + } + + List addedChildren = committingNode.getAddedChildren(); + if (addedChildren != null) { + children.addAll(addedChildren); + } + + List removedChildren = committingNode.getRemovedChildren(); + if (removedChildren != null) { + children.removeAll(removedChildren); + } + + if (!children.isEmpty()) { + Set temp = new HashSet(children); // remove all duplicates + committingNode.setChildren(new LinkedList(temp)); + } else { + committingNode.setChildren(null); + } + + Map properties = 
committingNode.getProperties(); + if (properties == null) { + properties = new HashMap(); + } + + Map addedProperties = committingNode.getAddedProps(); + if (addedProperties != null) { + properties.putAll(addedProperties); + } + + Map removedProperties = committingNode.getRemovedProps(); + if (removedProperties != null) { + for (Map.Entry entry : removedProperties.entrySet()) { + properties.remove(entry.getKey()); + } + } + + if (!properties.isEmpty()) { + committingNode.setProperties(properties); + } else { + committingNode.setProperties(null); + } + + logger.debug(String.format("Prepared committing node: %s", committingNode)); + } + } + + private void readAndIncHeadRevision() throws Exception { + headMongo = new ReadAndIncHeadRevisionQuery(mongoConnection).execute(); + } + + private void readExistingNodes() { + Set paths = new HashSet(); + for (NodeMongo nodeMongo : nodeMongos) { + paths.add(nodeMongo.getPath()); + } + + existingNodes = new FetchNodesForRevisionQuery(mongoConnection, paths, + String.valueOf(headMongo.getHeadRevisionId())).execute(); + } + + private void saveCommit() throws Exception { + new SaveCommitQuery(mongoConnection, commitMongo).execute(); + } + + private void saveNodes() throws Exception { + new SaveNodesQuery(mongoConnection, nodeMongos).execute(); + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/ConflictingCommitException.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/ConflictingCommitException.java new file mode 100644 index 0000000..c72f051 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/ConflictingCommitException.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +public class ConflictingCommitException extends Exception { + + private static final long serialVersionUID = -5827664000083665577L; + + public ConflictingCommitException() { + super(); + } + + public ConflictingCommitException(String message) { + super(message); + } + + public ConflictingCommitException(Throwable t) { + super(t); + } + + public ConflictingCommitException(String message, Throwable t) { + super(message, t); + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongo.java new file mode 100644 index 0000000..3b91f5c --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongo.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.command.AbstractCommand; + +import com.mongodb.BasicDBObject; +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSDBFile; + +public class GetBlobLengthCommandMongo extends AbstractCommand { + + private final MongoConnection mongoConnection; + private final String blobId; + + public GetBlobLengthCommandMongo(MongoConnection mongoConnection, String blobId) { + this.mongoConnection = mongoConnection; + this.blobId = blobId; + } + + @Override + public Long execute() throws Exception { + GridFS gridFS = mongoConnection.getGridFS(); + GridFSDBFile gridFSDBFile = gridFS.findOne(new BasicDBObject("md5", blobId)); + if (gridFSDBFile == null) { + throw new Exception("Blob does not exiss"); + } + return gridFSDBFile.getLength(); + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongo.java new file mode 100644 index 0000000..937144e --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongo.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.command.AbstractCommand; +import org.apache.jackrabbit.mongomk.query.FetchHeadRevisionQuery; + +/** + * A {@code Command} for getting the head revision from {@code MongoDB}. + * + * @author childNames = nodeMongo.getChildren(); + if (childNames != null) { + for (String childName : childNames) { + String childPath = PathUtils.concat(path, childName); + verified = verifyNodeHierarchyRec(childPath, ++currentDepth); + if (!verified) { + break; + } + } + } + } + } + + return verified; + } + + private boolean verifyProblematicNodes() { + boolean verified = true; + + for (Map.Entry entry : problematicNodes.entrySet()) { + String path = entry.getKey(); + Long revisionId = entry.getValue(); + + NodeMongo nodeMongo = pathAndNodeMap.get(path); + if (nodeMongo != null) { + if (!revisionId.equals(nodeMongo.getRevisionId())) { + verified = false; + + LOG.error(String + .format("Node could not be verified because the expected revisionId did not match: %d (expected) vs %d (actual)", + revisionId, nodeMongo.getRevisionId())); + + break; + } + } + } + + return verified; + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongo.java 
b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongo.java new file mode 100644 index 0000000..3999da7 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongo.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import java.util.List; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.command.AbstractCommand; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.query.FetchNodeByPathQuery; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.jackrabbit.oak.commons.PathUtils; + +/** + * A {@code Command} for determine whether a node exists from {@code MongoDB}. 
+ * + * @author { + + private final MongoConnection mongoConnection; + private final String blobId; + private final long blobOffset; + private final byte[] buffer; + private final int bufferOffset; + private final int length; + + public ReadBlobCommandMongo(MongoConnection mongoConnection, String blobId, long blobOffset, byte[] buffer, + int bufferOffset, int length) { + this.mongoConnection = mongoConnection; + this.blobId = blobId; + this.blobOffset = blobOffset; + this.buffer = buffer; + this.bufferOffset = bufferOffset; + this.length = length; + } + + @Override + public Integer execute() throws Exception { + return fetchBlobFromMongo(); + } + + // FIXME [Mete] This takes a long time, see MicroKernelIT#readBlob. See if + // it can be improved. + private int fetchBlobFromMongo() throws Exception { + GridFS gridFS = mongoConnection.getGridFS(); + GridFSDBFile gridFile = gridFS.findOne(new BasicDBObject("md5", blobId)); + long fileLength = gridFile.getLength(); + + long start = blobOffset; + long end = blobOffset + length; + if (end > fileLength) { + end = fileLength; + } + + int totalBytes = -1; + if (start < end) { + InputStream is = gridFile.getInputStream(); + IOUtils.skipFully(is, blobOffset); + totalBytes = is.read(buffer, bufferOffset, length); + is.close(); + } + return totalBytes; + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongo.java new file mode 100644 index 0000000..828a02e --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongo.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.codec.digest.DigestUtils; +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.command.AbstractCommand; + +import com.mongodb.BasicDBObject; +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSDBFile; +import com.mongodb.gridfs.GridFSInputFile; + +public class WriteBlobCommandMongo extends AbstractCommand { + + private final MongoConnection mongoConnection; + private final InputStream is; + + public WriteBlobCommandMongo(MongoConnection mongoConnection, InputStream is) { + this.mongoConnection = mongoConnection; + this.is = is; + } + + @Override + public String execute() throws Exception { + return saveBlob(); + } + + private String saveBlob() throws IOException { + GridFS gridFS = mongoConnection.getGridFS(); + BufferedInputStream bis = new BufferedInputStream(is); + String md5 = calculateMd5(bis); + GridFSDBFile gridFile = gridFS.findOne(new BasicDBObject("md5", md5)); + if (gridFile != null) { + is.close(); + return md5; + } + + GridFSInputFile gridFSInputFile = gridFS.createFile(bis, true); + gridFSInputFile.save(); + return gridFSInputFile.getMD5(); + } + + private String calculateMd5(BufferedInputStream bis) throws IOException { + 
bis.mark(Integer.MAX_VALUE); + String md5 = DigestUtils.md5Hex(bis); + bis.reset(); + return md5; + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitCommandInstructionVisitor.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitCommandInstructionVisitor.java new file mode 100644 index 0000000..90b749e --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitCommandInstructionVisitor.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.model; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddPropertyInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.CopyNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.MoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.RemoveNodeInstruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.SetPropertyInstruction; +import org.apache.jackrabbit.mongomk.api.model.InstructionVisitor; +import org.apache.jackrabbit.mongomk.query.FetchNodeByPathQuery; +import org.apache.jackrabbit.oak.commons.PathUtils; + +public class CommitCommandInstructionVisitor implements InstructionVisitor { + + private final long headRevisionId; + private final MongoConnection mongoConnection; + private final Map pathNodeMap; + + public CommitCommandInstructionVisitor(MongoConnection mongoConnection, + long headRevisionId) { + this.mongoConnection = mongoConnection; + this.headRevisionId = headRevisionId; + pathNodeMap = new HashMap(); + } + + public Map getPathNodeMap() { + return pathNodeMap; + } + + @Override + public void visit(AddNodeInstruction instruction) { +// Old code +// String path = instruction.getPath(); +// getStagedNode(path); +// if (!PathUtils.denotesRoot(path)) { +// String parentPath = PathUtils.getParentPath(path); +// NodeMongo parentNode = getStagedNode(parentPath); +// parentNode.addChild(PathUtils.getName(path)); +// } + + String path = instruction.getPath(); + getStagedNode(path); + String nodeName = PathUtils.getName(path); + String parentNodePath = PathUtils.getParentPath(path); + NodeMongo parent = null; + if (!PathUtils.denotesRoot(parentNodePath)) { + parent = getStoredNode(parentNodePath); + if (parent == null) 
{ + throw new RuntimeException("No such parent: " + PathUtils.getName(parentNodePath)); + } + // FIXME [Mete] Add once tests are fixed. + //if (parent.childExists(nodeName)) { + // throw new RuntimeException("There's already a child node with name '" + nodeName + "'"); + //} + } else { + parent = getStagedNode(parentNodePath); + } + parent.addChild(nodeName); + } + + @Override + public void visit(AddPropertyInstruction instruction) { + NodeMongo node = getStagedNode(instruction.getPath()); + node.addProperty(instruction.getKey(), instruction.getValue()); + } + + @Override + public void visit(CopyNodeInstruction instruction) { + String srcPath = instruction.getSourcePath(); + String destPath = instruction.getDestPath(); + + String srcParentPath = PathUtils.getParentPath(srcPath); + String srcNodeName = PathUtils.getName(srcPath); + + String destParentPath = PathUtils.getParentPath(destPath); + String destNodeName = PathUtils.getName(destPath); + + NodeMongo srcParent = pathNodeMap.get(srcParentPath); + if (srcParent == null) { + // The subtree to be copied has not been modified + boolean entryExists = getStoredNode(srcParentPath).childExists(srcNodeName); + if (!entryExists) { + throw new RuntimeException("Not found: " + srcPath); + } + NodeMongo destParent = getStagedNode(destParentPath); + if (destParent.childExists(destNodeName)) { + throw new RuntimeException("Node already exists at copy destination path: " + destPath); + } + + // Copy src node to destPath. + NodeMongo srcNode = getStoredNode(srcPath); + NodeMongo destNode = NodeMongo.fromDBObject(srcNode); + destNode.setPath(destPath); + // FIXME - [Mete] This needs to do proper merge instead of just add. + List addedChildren = srcNode.getAddedChildren(); + if (addedChildren != null && !addedChildren.isEmpty()) { + for (String child : addedChildren) { + getStagedNode(PathUtils.concat(destPath, child)); + destNode.addChild(child); + } + } + pathNodeMap.put(destPath, destNode); + + // Add to destParent. 
+ destParent.addChild(destNodeName); + + return; + } + + boolean srcEntryExists = srcParent.childExists(srcNodeName); + if (!srcEntryExists) { + throw new RuntimeException(srcPath); + } + + // FIXME - [Mete] The rest is not totally correct. + NodeMongo destParent = getStagedNode(destParentPath); + NodeMongo srcNode = getStagedNode(srcPath); + + if (srcNode != null) { + // Copy the modified subtree + NodeMongo destNode = NodeMongo.fromDBObject(srcNode); + destNode.setPath(destPath); + pathNodeMap.put(destPath, destNode); + destParent.addChild(destNodeName); + //destParent.add(destNodeName, srcNode.copy()); + } else { + NodeMongo destNode = NodeMongo.fromDBObject(srcNode); + destNode.setPath(destPath); + pathNodeMap.put(destPath, destNode); + destParent.addChild(destNodeName); + //destParent.add(new ChildNodeEntry(destNodeName, srcEntry.getId())); + } + + // [Mete] Old code from Philipp. + // retrieve all nodes beyond and add them as new children to the dest location +// List childNodesToCopy = new FetchNodesByPathAndDepthQuery(mongoConnection, srcPath, +// revisionId, -1).execute(); +// for (NodeMongo nodeMongo : childNodesToCopy) { +// String oldPath = nodeMongo.getPath(); +// String oldPathRel = PathUtils.relativize(srcPath, oldPath); +// String newPath = PathUtils.concat(destPath, oldPathRel); +// +// nodeMongo.setPath(newPath); +// nodeMongo.removeField("_id"); +// pathNodeMap.put(newPath, nodeMongo); +// } + + // tricky part now: In case we already know about any changes to these existing nodes we need to merge + // those now. 
+ } + + @Override + public void visit(MoveNodeInstruction instruction) { + String srcPath = instruction.getSourcePath(); + String destPath = instruction.getDestPath(); + + if (PathUtils.isAncestor(srcPath, destPath)) { + throw new RuntimeException("Target path cannot be descendant of source path: " + + destPath); + } + + String srcParentPath = PathUtils.getParentPath(srcPath); + String srcNodeName = PathUtils.getName(srcPath); + + String destParentPath = PathUtils.getParentPath(destPath); + String destNodeName = PathUtils.getName(destPath); + + // Add the old node with the new path. + NodeMongo destNode = pathNodeMap.get(destPath); + if (destNode == null) { + NodeMongo srcNode = getStoredNode(srcPath); + destNode = srcNode; + destNode.setPath(destPath); + pathNodeMap.put(destPath, destNode); + } + + // Remove from srcParent - [Mete] What if there is no such child? + NodeMongo scrParentNode = getStoredNode(srcParentPath); + scrParentNode.removeChild(srcNodeName); + + // Add to destParent + NodeMongo destParentNode = getStoredNode(destParentPath); + if (destParentNode.childExists(destNodeName)) { + throw new RuntimeException("Node already exists at move destination path: " + destPath); + } + destParentNode.addChild(destNodeName); + + // [Mete] Siblings? + } + + @Override + public void visit(RemoveNodeInstruction instruction) { + String path = instruction.getPath(); + String parentPath = PathUtils.getParentPath(path); + NodeMongo parentNode = getStagedNode(parentPath); + // [Mete] What if there is no such child? 
+ parentNode.removeChild(PathUtils.getName(path)); + } + + @Override + public void visit(SetPropertyInstruction instruction) { + String path = instruction.getPath(); + String key = instruction.getKey(); + Object value = instruction.getValue(); + NodeMongo node = getStagedNode(path); + if (value == null) { + node.removeProp(key); + } else { + node.addProperty(key, value); + } + } + + // TODO - [Mete] I think we need a way to distinguish between Staged + // and Stored nodes. For example, what if a node is retrieved as Staged + // but later it needs to be retrieved as Stored? + private NodeMongo getStagedNode(String path) { + NodeMongo node = pathNodeMap.get(path); + if (node == null) { + node = new NodeMongo(); + node.setPath(path); + pathNodeMap.put(path, node); + } + return node; + } + + private NodeMongo getStoredNode(String path) { + NodeMongo node = pathNodeMap.get(path); + if (node == null) { + FetchNodeByPathQuery query = new FetchNodeByPathQuery(mongoConnection, + path, headRevisionId); + query.setFetchAll(true); + node = query.execute(); + if (node != null) { + node.removeField("_id"); + pathNodeMap.put(path, node); + } + } + return node; + } +} \ No newline at end of file diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitMongo.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitMongo.java new file mode 100644 index 0000000..d170309 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/CommitMongo.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.model; + +import java.util.Date; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.api.model.Instruction.AddNodeInstruction; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.jackrabbit.oak.commons.PathUtils; + +import com.mongodb.BasicDBObject; + +/** + * The {@code MongoDB} representation of a commit. 
+ * + * @author affectedPaths = new HashSet(); + for (Instruction instruction : commit.getInstructions()) { + affectedPaths.add(instruction.getPath()); + + if (instruction instanceof AddNodeInstruction) { + affectedPaths.add(PathUtils.getParentPath(instruction.getPath())); + } + } + commitMongo.setAffectedPaths(new LinkedList(affectedPaths)); + + return commitMongo; + } + + public CommitMongo() { + setTimestamp(new Date().getTime()); + } + + @SuppressWarnings("unchecked") + public List getAffectedPaths() { + return (List) this.get(KEY_AFFECTED_PATH); + } + + public long getBaseRevisionId() { + return getLong(KEY_BASE_REVISION_ID); + } + + public String getDiff() { + return getString(KEY_DIFF); + } + + public String getMessage() { + return getString(KEY_MESSAGE); + } + + public String getPath() { + return getString(KEY_PATH); + } + + public long getRevisionId() { + return getLong(KEY_REVISION_ID); + } + + public boolean hasFailed() { + return this.getBoolean(KEY_FAILED); + } + + public void setAffectedPaths(List affectedPaths) { + put(KEY_AFFECTED_PATH, affectedPaths); + } + + public void setBaseRevId(long baseRevisionId) { + put(KEY_BASE_REVISION_ID, baseRevisionId); + } + + public void setDiff(String diff) { + put(KEY_DIFF, diff); + } + + public void setFailed() { + put(KEY_FAILED, Boolean.TRUE); + } + + public void setMessage(String message) { + put(KEY_MESSAGE, message); + } + + public void setPath(String path) { + put(KEY_PATH, path); + } + + public void setRevisionId(long revisionId) { + put(KEY_REVISION_ID, revisionId); + } + + public void setRevisionId(String revisionId) { + this.setRevisionId(MongoUtil.toMongoRepresentation(revisionId)); + } + + public void setTimestamp(long timestamp) { + put(KEY_TIMESTAMP, timestamp); + } + + public Long getTimestamp() { + return getLong(KEY_TIMESTAMP); + } +} \ No newline at end of file diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/HeadMongo.java 
b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/HeadMongo.java new file mode 100644 index 0000000..1c43036 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/model/HeadMongo.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.model; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +/** + * The {@code MongoDB} representation of the head revision. 
+ * + * @author properties = node.getProperties(); + if (properties != null) { + nodeMongo.setProperties(properties); + } + + Set children = node.getChildren(); + if (children != null) { + List childNames = new LinkedList(); + for (Node child : children) { + childNames.add(child.getName()); + } + nodeMongo.setChildren(childNames); + } + + return nodeMongo; + } + + public static Set fromNodes(Collection nodes) { + Set nodeMongos = new HashSet(nodes.size()); + for (Node node : nodes) { + NodeMongo nodeMongo = NodeMongo.fromNode(node); + nodeMongos.add(nodeMongo); + } + + return nodeMongos; + } + + public static List toNode(Collection nodeMongos) { + List nodes = new ArrayList(nodeMongos.size()); + for (NodeMongo nodeMongo : nodeMongos) { + Node node = NodeMongo.toNode(nodeMongo); + nodes.add(node); + } + + return nodes; + } + + public static NodeImpl toNode(NodeMongo nodeMongo) { + String revisionId = String.valueOf(nodeMongo.getRevisionId()); + String path = nodeMongo.getPath(); + List childNames = nodeMongo.getChildren(); + long childCount = childNames != null ? 
childNames.size() : 0; + Map properties = nodeMongo.getProperties(); + Set children = null; + if (childNames != null) { + children = new HashSet(); + for (String childName : childNames) { + NodeImpl child = new NodeImpl(); + child.setPath(PathUtils.concat(path, childName)); + children.add(child); + } + } + + NodeImpl nodeImpl = new NodeImpl(); + nodeImpl.setPath(path); + nodeImpl.setChildCount(childCount); + nodeImpl.setRevisionId(revisionId); + nodeImpl.setProperties(properties); + nodeImpl.setChildren(children); + + return nodeImpl; + } + + private List addedChildren; + private Map addedProps; + private List removedChildren; + private Map removedProps; + + public void addChild(String childName) { + if (addedChildren == null) { + addedChildren = new LinkedList(); + } + + addedChildren.add(childName); + } + + public void addProperty(String key, Object value) { + if (addedProps == null) { + addedProps = new HashMap(); + } + + addedProps.put(key, value); + } + + public List getAddedChildren() { + return addedChildren; + } + + public Map getAddedProps() { + return addedProps; + } + + @SuppressWarnings("unchecked") + public List getChildren() { + return (List) this.get(KEY_CHILDREN); + } + + public boolean childExists(String childName) { + List children = getChildren(); + if (children != null && !children.isEmpty()) { + if (children.contains(childName)) { + return true; + } + } + return addedChildExists(childName); + } + + private boolean addedChildExists(String childName) { + return addedChildren != null && !addedChildren.isEmpty()? 
+ addedChildren.contains(childName) : false; + } + + public String getName() { + return PathUtils.getName(getString(KEY_PATH)); + } + + public String getPath() { + return getString(KEY_PATH); + } + + @SuppressWarnings("unchecked") + public Map getProperties() { + return (Map) this.get(KEY_PROPERTIES); + } + + public List getRemovedChildren() { + return removedChildren; + } + + public Map getRemovedProps() { + return removedProps; + } + + public Long getRevisionId() { + return getLong(KEY_REVISION_ID); + } + + public void removeChild(String childName) { + if (removedChildren == null) { + removedChildren = new LinkedList(); + } + + removedChildren.add(childName); + } + + public void removeProp(String key) { + if (removedProps == null) { + removedProps = new HashMap(); + } + + removedProps.put(key, null); + } + + public void setBaseRevisionId(long baseRevisionId) { + put(KEY_BASE_REVISION_ID, baseRevisionId); + } + + public void setChildren(List children) { + if (children != null) { + put(KEY_CHILDREN, children); + } else { + removeField(KEY_CHILDREN); + } + } + + public void setPath(String path) { + put(KEY_PATH, path); + } + + public void setProperties(Map properties) { + if (properties != null) { + put(KEY_PROPERTIES, properties); + } else { + removeField(KEY_PROPERTIES); + } + } + + public void setRevisionId(long revisionId) { + put(KEY_REVISION_ID, revisionId); + } + + public void setRevisionId(String revisionId) { + this.setRevisionId(MongoUtil.toMongoRepresentation(revisionId)); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(super.toString()); + sb.append(" internal props: "); + sb.append("AddedChildren = "); + sb.append(addedChildren); + sb.append(", RemovedChildren = "); + sb.append(removedChildren); + sb.append(", AddedProps = "); + sb.append(addedProps); + sb.append(", RemovedProps = "); + sb.append(removedProps); + + return sb.toString(); + } +} diff --git 
a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/AbstractQuery.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/AbstractQuery.java new file mode 100644 index 0000000..dbc40e9 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/AbstractQuery.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.query; + +import org.apache.jackrabbit.mongomk.MongoConnection; + +/** + * An abstract base class for queries performed with {@code MongoDB}. + * + * @param + * The result type of the query. 
+ * + * @author 0) { + queryBuilder = queryBuilder.and(NodeMongo.KEY_REVISION_ID).is(revisionId); + } + DBObject query = queryBuilder.get(); + + DBObject filter = null; + if (!fetchAll) { + QueryBuilder filterBuilder = QueryBuilder.start(NodeMongo.KEY_REVISION_ID).is(1); + filterBuilder.and(NodeMongo.KEY_CHILDREN).is(1); + filter = filterBuilder.get(); + } + + NodeMongo nodeMongo = (NodeMongo) nodeCollection.findOne(query, filter); + + return nodeMongo; + } + + private boolean revisionIdExists() { + if (revisionId == 0) { + return true; + } + FetchValidRevisionsQuery query = new FetchValidRevisionsQuery(mongoConnection, String.valueOf(Long.MAX_VALUE)); + List revisionIds = query.execute(); + return revisionIds.contains(revisionId); + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQuery.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQuery.java new file mode 100644 index 0000000..ffdceda --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQuery.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.query; + +import java.util.List; +import java.util.regex.Pattern; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.log4j.Logger; + +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; + +/** + * An query for fetching nodes by path and depth. + * + * @author 0) { + sb.append("^"); + if (!"/".equals(path)) { + sb.append(path); + } + sb.append("(/[^/]*)"); + sb.append("{0,"); + sb.append(depth); + sb.append("}$"); + } + + Pattern pattern = Pattern.compile(sb.toString()); + + return pattern; + } + + private List fetchValidRevisions(MongoConnection mongoConnection, String revisionId) { + return new FetchValidRevisionsQuery(mongoConnection, revisionId).execute(); + } + + private DBCursor performQuery(Pattern pattern) { + DBCollection nodeCollection = mongoConnection.getNodeCollection(); + + QueryBuilder qb = QueryBuilder.start(NodeMongo.KEY_PATH).regex(pattern); + if (revisionId != null) { + qb = qb.and(NodeMongo.KEY_REVISION_ID).lessThanEquals(MongoUtil.toMongoRepresentation(revisionId)); + } + + DBObject query = qb.get(); + + LOG.debug(String.format("Executing query: %s", query)); + + DBCursor dbCursor = nodeCollection.find(query); + + return dbCursor; + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQuery.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQuery.java new file mode 100644 index 0000000..6df5332 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQuery.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.query; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.log4j.Logger; + +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; + +/** + * An query for fetching nodes for a specific revision. 
+ * + * @author existingRevId) { + nodeMongos.put(path, nodeMongo); + LOG.debug(String.format("Converted nodes was put into map and replaced %s (%d)", path, revId)); + } else { + LOG.debug(String.format( + "Converted nodes was not put into map because a newer version is available %s (%d)", path, + revId)); + } + } else { + nodeMongos.put(path, nodeMongo); + LOG.debug("Converted node was put into map"); + } + } + + return new ArrayList(nodeMongos.values()); + } + + private QueryUtils() { + // no initialization + } +} diff --git a/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/ReadAndIncHeadRevisionQuery.java b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/ReadAndIncHeadRevisionQuery.java new file mode 100644 index 0000000..0b4b566 --- /dev/null +++ b/oak-mongomk/src/main/java/org/apache/jackrabbit/mongomk/query/ReadAndIncHeadRevisionQuery.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.query; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.model.HeadMongo; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBCollection; +import com.mongodb.DBObject; + +/** + * An query for reading and incrementing the head revisio id. + * + * @author A typical invocation sequence is thus + *
+ *     Mongo mongo = new Mongo( new DBAddress( "localhost", 127017 ) );
+ *     DB db = mongo.getDB( "mydb" );
+ *     DBCollection collection = db.getCollection( "test" );
+ * 
+ * @dochub collections + */ +@SuppressWarnings( {"unchecked", "rawtypes" }) +public abstract class DBCollection { + + // THIS HAS BEEN PATCHED TO REMOVE SOME FINAL MODIFIER ON METHODS WHICH NEEDED TO BE MOCKED FOR TESTING + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param arr array of documents to save + * @param concern the write concern + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(DBObject[] arr , WriteConcern concern ) throws MongoException { + return insert( arr, concern, getDBEncoderFactory().create() ); + } + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param arr array of documents to save + * @param concern the write concern + * @param encoder the DBEncoder to use + * @return + * @throws MongoException + * @dochub insert + */ + public abstract WriteResult insert(DBObject[] arr , WriteConcern concern, DBEncoder encoder) throws MongoException; + + /** + * Inserts a document into the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param o + * @param concern the write concern + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(DBObject o , WriteConcern concern ) + throws MongoException { + return insert( new DBObject[]{ o } , concern ); + } + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param arr array of documents to save + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(DBObject ... 
arr) + throws MongoException { + return insert( arr , getWriteConcern() ); + } + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param arr array of documents to save + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(WriteConcern concern, DBObject ... arr) + throws MongoException { + return insert( arr, concern ); + } + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param list list of documents to save + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(List list ) + throws MongoException { + return insert( list, getWriteConcern() ); + } + + /** + * Saves document(s) to the database. + * if doc doesn't have an _id, one will be added + * you can get the _id that was added from doc after the insert + * + * @param list list of documents to save + * @param concern the write concern + * @return + * @throws MongoException + * @dochub insert + */ + public WriteResult insert(List list, WriteConcern concern ) + throws MongoException { + return insert( list.toArray( new DBObject[list.size()] ) , concern ); + } + + /** + * Performs an update operation. + * @param q search query for old object to update + * @param o object with which to update q + * @param upsert if the database should create the element if it does not exist + * @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will + * not be inserted if it does not exist in the collection and upsert=true and multi=true. + * See
http://www.mongodb.org/display/DOCS/Atomic+Operations + * @param concern the write concern + * @return + * @throws MongoException + * @dochub update + */ + public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern ) throws MongoException { + return update( q, o, upsert, multi, concern, getDBEncoderFactory().create() ); + } + + /** + * Performs an update operation. + * @param q search query for old object to update + * @param o object with which to update q + * @param upsert if the database should create the element if it does not exist + * @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will + * not be inserted if it does not exist in the collection and upsert=true and multi=true. + * See http://www.mongodb.org/display/DOCS/Atomic+Operations + * @param concern the write concern + * @param encoder the DBEncoder to use + * @return + * @throws MongoException + * @dochub update + */ + public abstract WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern, DBEncoder encoder ) throws MongoException ; + + /** + * calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean, com.mongodb.WriteConcern)} with default WriteConcern. 
+ * @param q search query for old object to update + * @param o object with which to update q + * @param upsert if the database should create the element if it does not exist + * @param multi if the update should be applied to all objects matching (db version 1.1.3 and above) + * See http://www.mongodb.org/display/DOCS/Atomic+Operations + * @return + * @throws MongoException + * @dochub update + */ + public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi ) + throws MongoException { + return update( q , o , upsert , multi , getWriteConcern() ); + } + + /** + * calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=false + * @param q search query for old object to update + * @param o object with which to update q + * @return + * @throws MongoException + * @dochub update + */ + public WriteResult update( DBObject q , DBObject o ) throws MongoException { + return update( q , o , false , false ); + } + + /** + * calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=true + * @param q search query for old object to update + * @param o object with which to update q + * @return + * @throws MongoException + * @dochub update + */ + public WriteResult updateMulti( DBObject q , DBObject o ) throws MongoException { + return update( q , o , false , true ); + } + + /** + * Adds any necessary fields to a given object before saving it to the collection. + * @param o object to which to add the fields + */ + protected abstract void doapply( DBObject o ); + + /** + * Removes objects from the database collection. 
+ * @param o the object that documents to be removed must match + * @param concern WriteConcern for this operation + * @return + * @throws MongoException + * @dochub remove + */ + public WriteResult remove( DBObject o , WriteConcern concern ) throws MongoException { + return remove( o, concern, getDBEncoderFactory().create() ); + } + + /** + * Removes objects from the database collection. + * @param o the object that documents to be removed must match + * @param concern WriteConcern for this operation + * @param encoder the DBEncoder to use + * @return + * @throws MongoException + * @dochub remove + */ + public abstract WriteResult remove( DBObject o , WriteConcern concern, DBEncoder encoder ) throws MongoException ; + + /** + * calls {@link DBCollection#remove(com.mongodb.DBObject, com.mongodb.WriteConcern)} with the default WriteConcern + * @param o the object that documents to be removed must match + * @return + * @throws MongoException + * @dochub remove + */ + public WriteResult remove( DBObject o ) + throws MongoException { + return remove( o , getWriteConcern() ); + } + + + /** + * Finds objects + */ + abstract Iterator __find( DBObject ref , DBObject fields , int numToSkip , int batchSize , int limit, int options, ReadPreference readPref, DBDecoder decoder ) throws MongoException ; + + /** + * Calls {@link DBCollection#find(com.mongodb.DBObject, com.mongodb.DBObject, int, int)} and applies the query options + * @param query query used to search + * @param fields the fields of matching objects to return + * @param numToSkip number of objects to skip + * @param batchSize the batch size. 
This option has a complex behavior, see {@link DBCursor#batchSize(int) } + * @param options - see Bytes QUERYOPTION_* + * @return the cursor + * @throws MongoException + * @dochub find + */ + @Deprecated + public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize , int options ) throws MongoException{ + return find(query, fields, numToSkip, batchSize).addOption(options); + } + + + /** + * Finds objects from the database that match a query. + * A DBCursor object is returned, that can be iterated to go through the results. + * + * @param query query used to search + * @param fields the fields of matching objects to return + * @param numToSkip number of objects to skip + * @param batchSize the batch size. This option has a complex behavior, see {@link DBCursor#batchSize(int) } + * @return the cursor + * @throws MongoException + * @dochub find + */ + @Deprecated + public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize ) { + DBCursor cursor = find(query, fields).skip(numToSkip).batchSize(batchSize); + return cursor; + } + + // ------ + + /** + * Finds an object by its id. + * This compares the passed in value to the _id field of the document + * + * @param obj any valid object + * @return the object, if found, otherwise null + * @throws MongoException + */ + public final DBObject findOne( Object obj ) + throws MongoException { + return findOne(obj, null); + } + + + /** + * Finds an object by its id. + * This compares the passed in value to the _id field of the document + * + * @param obj any valid object + * @param fields fields to return + * @return the object, if found, otherwise null + * @dochub find + */ + public final DBObject findOne( Object obj, DBObject fields ) { + Iterator iterator = __find(new BasicDBObject("_id", obj), fields, 0, -1, 0, getOptions(), getReadPreference(), _decoderFactory.create() ); + return (iterator != null ? 
iterator.next() : null); + } + + /** + * Finds the first document in the query and updates it. + * @param query query to match + * @param fields fields to be returned + * @param sort sort to apply before picking first document + * @param remove if true, document found will be removed + * @param update update to apply + * @param returnNew if true, the updated document is returned, otherwise the old document is returned (or it would be lost forever) + * @param upsert do upsert (insert if document not present) + * @return the document + */ + public DBObject findAndModify(DBObject query, DBObject fields, DBObject sort, boolean remove, DBObject update, boolean returnNew, boolean upsert) { + + BasicDBObject cmd = new BasicDBObject( "findandmodify", _name); + if (query != null && !query.keySet().isEmpty()) + cmd.append( "query", query ); + if (fields != null && !fields.keySet().isEmpty()) + cmd.append( "fields", fields ); + if (sort != null && !sort.keySet().isEmpty()) + cmd.append( "sort", sort ); + + if (remove) + cmd.append( "remove", remove ); + else { + if (update != null && !update.keySet().isEmpty()) { + // if 1st key doesn't start with $, then object will be inserted as is, need to check it + String key = update.keySet().iterator().next(); + if (key.charAt(0) != '$') + _checkObject(update, false, false); + cmd.append( "update", update ); + } + if (returnNew) + cmd.append( "new", returnNew ); + if (upsert) + cmd.append( "upsert", upsert ); + } + + if (remove && !(update == null || update.keySet().isEmpty() || returnNew)) + throw new MongoException("FindAndModify: Remove cannot be mixed with the Update, or returnNew params!"); + + CommandResult res = this._db.command( cmd ); + if (res.ok() || res.getErrorMessage().equals( "No matching object found" )) + return (DBObject) res.get( "value" ); + res.throwOnError(); + return null; + } + + + /** + * calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, 
com.mongodb.DBObject, boolean, boolean)} + * with fields=null, remove=false, returnNew=false, upsert=false + * @param query + * @param sort + * @param update + * @return the old document + */ + public DBObject findAndModify( DBObject query , DBObject sort , DBObject update){ + return findAndModify( query, null, sort, false, update, false, false); + } + + /** + * calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)} + * with fields=null, sort=null, remove=false, returnNew=false, upsert=false + * @param query + * @param update + * @return the old document + */ + public DBObject findAndModify( DBObject query , DBObject update ) { + return findAndModify( query, null, null, false, update, false, false ); + } + + /** + * calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)} + * with fields=null, sort=null, remove=true, returnNew=false, upsert=false + * @param query + * @return the removed document + */ + public DBObject findAndRemove( DBObject query ) { + return findAndModify( query, null, null, true, null, false, false ); + } + + // --- START INDEX CODE --- + + /** + * calls {@link DBCollection#createIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default index options + * @param keys an object with a key set of the fields desired for the index + * @throws MongoException + */ + public final void createIndex( final DBObject keys ) + throws MongoException { + createIndex( keys , defaultOptions( keys ) ); + } + + /** + * Forces creation of an index on a set of fields, if one does not already exist. 
+ * @param keys + * @param options + * @throws MongoException + */ + public void createIndex( DBObject keys , DBObject options ) throws MongoException { + createIndex( keys, options, getDBEncoderFactory().create() ); + } + + /** + * Forces creation of an index on a set of fields, if one does not already exist. + * @param keys + * @param options + * @param encoder the DBEncoder to use + * @throws MongoException + */ + public abstract void createIndex( DBObject keys , DBObject options, DBEncoder encoder ) throws MongoException; + + /** + * Creates an ascending index on a field with default options, if one does not already exist. + * @param name name of field to index on + */ + public final void ensureIndex( final String name ){ + ensureIndex( new BasicDBObject( name , 1 ) ); + } + + /** + * calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default options + * @param keys an object with a key set of the fields desired for the index + * @throws MongoException + */ + public final void ensureIndex( final DBObject keys ) + throws MongoException { + ensureIndex( keys , defaultOptions( keys ) ); + } + + /** + * calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, java.lang.String, boolean)} with unique=false + * @param keys fields to use for index + * @param name an identifier for the index + * @throws MongoException + * @dochub indexes + */ + public void ensureIndex( DBObject keys , String name ) + throws MongoException { + ensureIndex( keys , name , false ); + } + + /** + * Ensures an index on this collection (that is, the index will be created if it does not exist). + * @param keys fields to use for index + * @param name an identifier for the index. If null or empty, the default name will be used. 
+ * @param unique if the index should be unique + * @throws MongoException + */ + public void ensureIndex( DBObject keys , String name , boolean unique ) + throws MongoException { + DBObject options = defaultOptions( keys ); + if (name != null && name.length()>0) + options.put( "name" , name ); + if ( unique ) + options.put( "unique" , Boolean.TRUE ); + ensureIndex( keys , options ); + } + + /** + * Creates an index on a set of fields, if one does not already exist. + * @param keys an object with a key set of the fields desired for the index + * @param optionsIN options for the index (name, unique, etc) + * @throws MongoException + */ + public final void ensureIndex( final DBObject keys , final DBObject optionsIN ) + throws MongoException { + + if ( checkReadOnly( false ) ) return; + + final DBObject options = defaultOptions( keys ); + for ( String k : optionsIN.keySet() ) + options.put( k , optionsIN.get( k ) ); + + final String name = options.get( "name" ).toString(); + + if ( _createdIndexes.contains( name ) ) + return; + + createIndex( keys , options ); + _createdIndexes.add( name ); + } + + /** + * Clears all indices that have not yet been applied to this collection. + */ + public void resetIndexCache(){ + _createdIndexes.clear(); + } + + DBObject defaultOptions( DBObject keys ){ + DBObject o = new BasicDBObject(); + o.put( "name" , genIndexName( keys ) ); + o.put( "ns" , _fullName ); + return o; + } + + /** + * Convenience method to generate an index name from the set of fields it is over. 
+ * @param keys the names of the fields used in this index + * @return a string representation of this index's fields + */ + public static String genIndexName( DBObject keys ){ + StringBuilder name = new StringBuilder(); + for ( String s : keys.keySet() ){ + if ( name.length() > 0 ) + name.append( '_' ); + name.append( s ).append( '_' ); + Object val = keys.get( s ); + if ( val instanceof Number || val instanceof String ) + name.append( val.toString().replace( ' ', '_' ) ); + } + return name.toString(); + } + + // --- END INDEX CODE --- + + /** + * Set hint fields for this collection (to optimize queries). + * @param lst a list of DBObjects to be used as hints + */ + public void setHintFields( List lst ){ + _hintFields = lst; + } + + /** + * Queries for an object in this collection. + * @param ref object for which to search + * @return an iterator over the results + * @dochub find + */ + public DBCursor find( DBObject ref ){ + return new DBCursor( this, ref, null, getReadPreference()); + } + + /** + * Queries for an object in this collection. + * + *

+ * An empty DBObject will match every document in the collection. + * Regardless of fields specified, the _id field is always returned. + *

+ *

+ * An example that returns the "x" and "_id" fields for every document + * in the collection that has an "x" field: + *

+ *
+     * BasicDBObject keys = new BasicDBObject();
+     * keys.put("x", 1);
+     *
+     * DBCursor cursor = collection.find(new BasicDBObject(), keys);
+     * 
+ * + * @param ref object for which to search + * @param keys fields to return + * @return a cursor to iterate over results + * @dochub find + */ + public DBCursor find( DBObject ref , DBObject keys ){ + return new DBCursor( this, ref, keys, getReadPreference()); + } + + + /** + * Queries for all objects in this collection. + * @return a cursor which will iterate over every object + * @dochub find + */ + public DBCursor find(){ + return new DBCursor( this, null, null, getReadPreference()); + } + + /** + * Returns a single object from this collection. + * @return the object found, or null if the collection is empty + * @throws MongoException + */ + public DBObject findOne() + throws MongoException { + return findOne( new BasicDBObject() ); + } + + /** + * Returns a single object from this collection matching the query. + * @param o the query object + * @return the object found, or null if no such object exists + * @throws MongoException + */ + public DBObject findOne( DBObject o ) + throws MongoException { + return findOne( o, null, getReadPreference()); + } + + /** + * Returns a single object from this collection matching the query. + * @param o the query object + * @param fields fields to return + * @return the object found, or null if no such object exists + * @dochub find + */ + public DBObject findOne( DBObject o, DBObject fields ) { + return findOne( o, fields, getReadPreference()); + } + /** + * Returns a single object from this collection matching the query. + * @param o the query object + * @param fields fields to return + * @return the object found, or null if no such object exists + * @dochub find + */ + public DBObject findOne( DBObject o, DBObject fields, ReadPreference readPref ) { + Iterator i = __find( o , fields , 0 , -1 , 0, getOptions(), readPref, _decoderFactory.create() ); + DBObject obj = (i == null ? 
null : i.next()); + if ( obj != null && ( fields != null && fields.keySet().size() > 0 ) ){ + obj.markAsPartialObject(); + } + return obj; + } + + + /** + * calls {@link DBCollection#apply(com.mongodb.DBObject, boolean)} with ensureID=true + * @param o DBObject to which to add fields + * @return the modified parameter object + */ + public final Object apply( DBObject o ){ + return apply( o , true ); + } + + /** + * calls {@link DBCollection#doapply(com.mongodb.DBObject)}, optionally adding an automatic _id field + * @param jo object to add fields to + * @param ensureID whether to add an _id field + * @return the modified object o + */ + public final Object apply( DBObject jo , boolean ensureID ){ + + Object id = jo.get( "_id" ); + if ( ensureID && id == null ){ + id = ObjectId.get(); + jo.put( "_id" , id ); + } + + doapply( jo ); + + return id; + } + + /** + * calls {@link DBCollection#save(com.mongodb.DBObject, com.mongodb.WriteConcern)} with default WriteConcern + * @param jo the DBObject to save + * will add _id field to jo if needed + * @return + */ + public final WriteResult save( DBObject jo ) { + return save(jo, getWriteConcern()); + } + + /** + * Saves an object to this collection (does insert or update based on the object _id). 
+ * @param jo the DBObject to save + * @param concern the write concern + * @return + * @throws MongoException + */ + public final WriteResult save( DBObject jo, WriteConcern concern ) + throws MongoException { + if ( checkReadOnly( true ) ) + return null; + + _checkObject( jo , false , false ); + + Object id = jo.get( "_id" ); + + if ( id == null || ( id instanceof ObjectId && ((ObjectId)id).isNew() ) ){ + if ( id != null && id instanceof ObjectId ) + ((ObjectId)id).notNew(); + if ( concern == null ) + return insert( jo ); + else + return insert( jo, concern ); + } + + DBObject q = new BasicDBObject(); + q.put( "_id" , id ); + if ( concern == null ) + return update( q , jo , true , false ); + else + return update( q , jo , true , false , concern ); + + } + + // ---- DB COMMANDS ---- + /** + * Drops all indices from this collection + * @throws MongoException + */ + public void dropIndexes() + throws MongoException { + dropIndexes( "*" ); + } + + + /** + * Drops an index from this collection + * @param name the index name + * @throws MongoException + */ + public void dropIndexes( String name ) + throws MongoException { + DBObject cmd = BasicDBObjectBuilder.start() + .add( "deleteIndexes" , getName() ) + .add( "index" , name ) + .get(); + + resetIndexCache(); + CommandResult res = _db.command( cmd ); + if (res.ok() || res.getErrorMessage().equals( "ns not found" )) + return; + res.throwOnError(); + } + + /** + * Drops (deletes) this collection. Use with care. + * @throws MongoException + */ + public void drop() + throws MongoException { + resetIndexCache(); + CommandResult res =_db.command( BasicDBObjectBuilder.start().add( "drop" , getName() ).get() ); + if (res.ok() || res.getErrorMessage().equals( "ns not found" )) + return; + res.throwOnError(); + } + + /** + * returns the number of documents in this collection. 
+ * @return + * @throws MongoException + */ + public long count() + throws MongoException { + return getCount(new BasicDBObject(), null); + } + + /** + * returns the number of documents that match a query. + * @param query query to match + * @return + * @throws MongoException + */ + public long count(DBObject query) + throws MongoException { + return getCount(query, null); + } + + + /** + * calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with an empty query and null fields. + * @return number of documents that match query + * @throws MongoException + */ + public long getCount() + throws MongoException { + return getCount(new BasicDBObject(), null); + } + + /** + * calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with null fields. + * @param query query to match + * @return + * @throws MongoException + */ + public long getCount(DBObject query) + throws MongoException { + return getCount(query, null); + } + + /** + * calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject, long, long)} with limit=0 and skip=0 + * @param query query to match + * @param fields fields to return + * @return + * @throws MongoException + */ + public long getCount(DBObject query, DBObject fields) + throws MongoException { + return getCount( query , fields , 0 , 0 ); + } + + /** + * Returns the number of documents in the collection + * that match the specified query + * + * @param query query to select documents to count + * @param fields fields to return + * @param limit limit the count to this value + * @param skip number of entries to skip + * @return number of documents that match query and fields + * @throws MongoException + */ + public long getCount(DBObject query, DBObject fields, long limit, long skip ) + throws MongoException { + + BasicDBObject cmd = new BasicDBObject(); + cmd.put("count", getName()); + cmd.put("query", query); + if (fields != null) { + cmd.put("fields", fields); + } + + if ( limit 
> 0 ) + cmd.put( "limit" , limit ); + if ( skip > 0 ) + cmd.put( "skip" , skip ); + + CommandResult res = _db.command(cmd,getOptions()); + + if ( ! res.ok() ){ + String errmsg = res.getErrorMessage(); + + if ( errmsg.equals("ns does not exist") || + errmsg.equals("ns missing" ) ){ + // for now, return 0 - lets pretend it does exist + return 0; + } + + res.throwOnError(); + } + + return res.getLong("n"); + } + + /** + * Calls {@link DBCollection#rename(java.lang.String, boolean)} with dropTarget=false + * @param newName new collection name (not a full namespace) + * @return the new collection + * @throws MongoException + */ + public DBCollection rename( String newName ) + throws MongoException { + return rename(newName, false); + } + + /** + * renames of this collection to newName + * @param newName new collection name (not a full namespace) + * @param dropTarget if a collection with the new name exists, whether or not to drop it + * @return the new collection + * @throws MongoException + */ + public DBCollection rename( String newName, boolean dropTarget ) + throws MongoException { + CommandResult ret = + _db.getSisterDB( "admin" ) + .command( BasicDBObjectBuilder.start() + .add( "renameCollection" , _fullName ) + .add( "to" , _db._name + "." 
+ newName ) + .add( "dropTarget" , dropTarget ) + .get() ); + ret.throwOnError(); + resetIndexCache(); + return _db.getCollection( newName ); + } + + /** + * calls {@link DBCollection#group(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, java.lang.String, java.lang.String)} with finalize=null + * @param key - { a : true } + * @param cond - optional condition on query + * @param reduce javascript reduce function + * @param initial initial value for first match on a key + * @return + * @throws MongoException + * @see http://www.mongodb.org/display/DOCS/Aggregation + */ + public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce ) + throws MongoException { + return group( key , cond , initial , reduce , null ); + } + + /** + * Applies a group operation + * @param key - { a : true } + * @param cond - optional condition on query + * @param reduce javascript reduce function + * @param initial initial value for first match on a key + * @param finalize An optional function that can operate on the result(s) of the reduce function. 
+ * @return + * @throws MongoException + * @see http://www.mongodb.org/display/DOCS/Aggregation + */ + public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce , String finalize ) + throws MongoException { + GroupCommand cmd = new GroupCommand(this, key, cond, initial, reduce, finalize); + return group( cmd ); + } + + /** + * Applies a group operation + * @param cmd the group command + * @return + * @throws MongoException + * @see http://www.mongodb.org/display/DOCS/Aggregation + */ + public DBObject group( GroupCommand cmd ) { + CommandResult res = _db.command( cmd.toDBObject(), getOptions() ); + res.throwOnError(); + return (DBObject)res.get( "retval" ); + } + + + /** + * @deprecated prefer the {@link DBCollection#group(com.mongodb.GroupCommand)} which is more standard + * Applies a group operation + * @param args object representing the arguments to the group function + * @return + * @throws MongoException + * @see http://www.mongodb.org/display/DOCS/Aggregation + */ + @Deprecated + public DBObject group( DBObject args ) + throws MongoException { + args.put( "ns" , getName() ); + CommandResult res = _db.command( new BasicDBObject( "group" , args ), getOptions() ); + res.throwOnError(); + return (DBObject)res.get( "retval" ); + } + + /** + * find distinct values for a key + * @param key + * @return + */ + public List distinct( String key ){ + return distinct( key , new BasicDBObject() ); + } + + /** + * find distinct values for a key + * @param key + * @param query query to match + * @return + */ + public List distinct( String key , DBObject query ){ + DBObject c = BasicDBObjectBuilder.start() + .add( "distinct" , getName() ) + .add( "key" , key ) + .add( "query" , query ) + .get(); + + CommandResult res = _db.command( c, getOptions() ); + res.throwOnError(); + return (List)(res.get( "values" )); + } + + /** + * performs a map reduce operation + * Runs the command in REPLACE output mode (saves to named collection) + * + * @param map 
+ * map function in javascript code + * @param outputTarget + * optional - leave null if want to use temp collection + * @param reduce + * reduce function in javascript code + * @param query + * to match + * @return + * @throws MongoException + * @dochub mapreduce + */ + public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , DBObject query ) throws MongoException{ + return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , MapReduceCommand.OutputType.REPLACE, query ) ); + } + + /** + * performs a map reduce operation + * Specify an outputType to control job execution + * * INLINE - Return results inline + * * REPLACE - Replace the output collection with the job output + * * MERGE - Merge the job output with the existing contents of outputTarget + * * REDUCE - Reduce the job output with the existing contents of + * outputTarget + * + * @param map + * map function in javascript code + * @param outputTarget + * optional - leave null if want to use temp collection + * @param outputType + * set the type of job output + * @param reduce + * reduce function in javascript code + * @param query + * to match + * @return + * @throws MongoException + * @dochub mapreduce + */ + public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , MapReduceCommand.OutputType outputType , DBObject query ) + throws MongoException{ + return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , outputType , query ) ); + } + + /** + * performs a map reduce operation + * + * @param command + * object representing the parameters + * @return + * @throws MongoException + */ + public MapReduceOutput mapReduce( MapReduceCommand command ) throws MongoException{ + DBObject cmd = command.toDBObject(); + // if type in inline, then query options like slaveOk is fine + CommandResult res = null; + if (command.getOutputType() == MapReduceCommand.OutputType.INLINE) + res = _db.command( cmd, getOptions(), 
command.getReadPreference() != null ? command.getReadPreference() : getReadPreference() ); + else + res = _db.command( cmd ); + res.throwOnError(); + return new MapReduceOutput( this , cmd, res ); + } + + /** + * performs a map reduce operation + * + * @param command + * object representing the parameters + * @return + * @throws MongoException + */ + public MapReduceOutput mapReduce( DBObject command ) throws MongoException{ + if ( command.get( "mapreduce" ) == null && command.get( "mapReduce" ) == null ) + throw new IllegalArgumentException( "need mapreduce arg" ); + CommandResult res = _db.command( command ); + res.throwOnError(); + return new MapReduceOutput( this , command, res ); + } + + /** + * Return a list of the indexes for this collection. Each object + * in the list is the "info document" from MongoDB + * + * @return list of index documents + */ + public List getIndexInfo() { + BasicDBObject cmd = new BasicDBObject(); + cmd.put("ns", getFullName()); + + DBCursor cur = _db.getCollection("system.indexes").find(cmd); + + List list = new ArrayList(); + + while(cur.hasNext()) { + list.add(cur.next()); + } + + return list; + } + + /** + * Drops an index from this collection + * @param keys keys of the index + * @throws MongoException + */ + public void dropIndex( DBObject keys ) + throws MongoException { + dropIndexes( genIndexName( keys ) ); + } + + /** + * Drops an index from this collection + * @param name name of index to drop + * @throws MongoException + */ + public void dropIndex( String name ) + throws MongoException { + dropIndexes( name ); + } + + /** + * gets the collections statistics ("collstats" command) + * @return + */ + public CommandResult getStats() { + return getDB().command(new BasicDBObject("collstats", getName()), getOptions()); + } + + /** + * returns whether or not this is a capped collection + * @return + */ + public boolean isCapped() { + CommandResult stats = getStats(); + Object capped = stats.get("capped"); + return(capped != null 
&& (Integer)capped == 1); + } + + // ------ + + /** + * Initializes a new collection. No operation is actually performed on the database. + * @param base database in which to create the collection + * @param name the name of the collection + */ + protected DBCollection( DB base , String name ){ + _db = base; + _name = name; + _fullName = _db.getName() + "." + name; + _options = new Bytes.OptionHolder( _db._options ); + _decoderFactory = _db.getMongo().getMongoOptions().dbDecoderFactory; + _encoderFactory = _db.getMongo().getMongoOptions().dbEncoderFactory; + } + + protected DBObject _checkObject( DBObject o , boolean canBeNull , boolean query ){ + if ( o == null ){ + if ( canBeNull ) + return null; + throw new IllegalArgumentException( "can't be null" ); + } + + if ( o.isPartialObject() && ! query ) + throw new IllegalArgumentException( "can't save partial objects" ); + + if ( ! query ){ + _checkKeys(o); + } + return o; + } + + /** + * Checks key strings for invalid characters. + */ + private void _checkKeys( DBObject o ) { + for ( String s : o.keySet() ){ + validateKey ( s ); + Object inner = o.get( s ); + if ( inner instanceof DBObject ) { + _checkKeys( (DBObject)inner ); + } else if ( inner instanceof Map ) { + _checkKeys( (Map)inner ); + } + } + } + + /** + * Checks key strings for invalid characters. + */ + private void _checkKeys( Map o ) { + for ( String s : o.keySet() ){ + validateKey ( s ); + Object inner = o.get( s ); + if ( inner instanceof DBObject ) { + _checkKeys( (DBObject)inner ); + } else if ( inner instanceof Map ) { + _checkKeys( (Map)inner ); + } + } + } + + /** + * Check for invalid key names + * @param s the string field/key to check + * @exception IllegalArgumentException if the key is not valid. + */ + private void validateKey(String s ) { + if ( s.contains( "." ) ) + throw new IllegalArgumentException( "fields stored in the db can't have . in them. 
(Bad Key: '" + s + "')" ); + if ( s.startsWith( "$" ) ) + throw new IllegalArgumentException( "fields stored in the db can't start with '$' (Bad Key: '" + s + "')" ); + } + + /** + * Finds a collection that is prefixed with this collection's name. + * A typical use of this might be + *
+     *    DBCollection users = mongo.getCollection( "wiki" ).getCollection( "users" );
+     * 
+ * Which is equivalent to + *
+ * DBCollection users = mongo.getCollection( "wiki.users" ); + *
+ * @param n the name of the collection to find + * @return the matching collection + */ + public DBCollection getCollection( String n ){ + return _db.getCollection( _name + "." + n ); + } + + /** + * Returns the name of this collection. + * @return the name of this collection + */ + public String getName(){ + return _name; + } + + /** + * Returns the full name of this collection, with the database name as a prefix. + * @return the name of this collection + */ + public String getFullName(){ + return _fullName; + } + + /** + * Returns the database this collection is a member of. + * @return this collection's database + */ + public DB getDB(){ + return _db; + } + + /** + * Returns if this collection's database is read-only + * @param strict if an exception should be thrown if the database is read-only + * @return if this collection's database is read-only + * @throws RuntimeException if the database is read-only and strict is set + */ + protected boolean checkReadOnly( boolean strict ){ + if ( ! _db._readOnly ) + return false; + + if ( ! strict ) + return true; + + throw new IllegalStateException( "db is read only" ); + } + + @Override + public int hashCode(){ + return _fullName.hashCode(); + } + + @Override + public boolean equals( Object o ){ + return o == this; + } + + @Override + public String toString(){ + return _name; + } + + /** + * Sets a default class for objects in this collection; null resets the class to nothing. + * @param c the class + * @throws IllegalArgumentException if c is not a DBObject + */ + public void setObjectClass( Class c ){ + if ( c == null ){ + // reset + _wrapper = null; + _objectClass = null; + return; + } + + if ( ! 
DBObject.class.isAssignableFrom( c ) ) + throw new IllegalArgumentException( c.getName() + " is not a DBObject" ); + _objectClass = c; + if ( ReflectionDBObject.class.isAssignableFrom( c ) ) + _wrapper = ReflectionDBObject.getWrapper( c ); + else + _wrapper = null; + } + + /** + * Gets the default class for objects in the collection + * @return the class + */ + public Class getObjectClass(){ + return _objectClass; + } + + /** + * sets the internal class + * @param path + * @param c + */ + public void setInternalClass( String path , Class c ){ + _internalClass.put( path , c ); + } + + /** + * gets the internal class + * @param path + * @return + */ + protected Class getInternalClass( String path ){ + Class c = _internalClass.get( path ); + if ( c != null ) + return c; + + if ( _wrapper == null ) + return null; + return _wrapper.getInternalClass( path ); + } + + /** + * Set the write concern for this collection. Will be used for + * writes to this collection. Overrides any setting of write + * concern at the DB level. See the documentation for + * {@link WriteConcern} for more information. + * + * @param concern write concern to use + */ + public void setWriteConcern( WriteConcern concern ){ + _concern = concern; + } + + /** + * Get the write concern for this collection. + * @return + */ + public WriteConcern getWriteConcern(){ + if ( _concern != null ) + return _concern; + return _db.getWriteConcern(); + } + + /** + * Sets the read preference for this collection. Will be used as default + * for reads from this collection; overrides DB & Connection level settings. + * See the documentation for {@link ReadPreference} for more information. 
+ * + * @param preference Read Preference to use + */ + public void setReadPreference( ReadPreference preference ){ + _readPref = preference; + } + + /** + * Gets the read preference + * @return + */ + public ReadPreference getReadPreference(){ + if ( _readPref != null ) + return _readPref; + return _db.getReadPreference(); + } + /** + * makes this query ok to run on a slave node + * + * @deprecated Replaced with ReadPreference.SECONDARY + * @see com.mongodb.ReadPreference.SECONDARY + */ + @Deprecated + public void slaveOk(){ + addOption( Bytes.QUERYOPTION_SLAVEOK ); + } + + /** + * adds a default query option + * @param option + */ + public void addOption( int option ){ + _options.add( option ); + } + + /** + * sets the default query options + * @param options + */ + public void setOptions( int options ){ + _options.set( options ); + } + + /** + * resets the default query options + */ + public void resetOptions(){ + _options.reset(); + } + + /** + * gets the default query options + * @return + */ + public int getOptions(){ + return _options.get(); + } + + public void setDBDecoderFactory(DBDecoderFactory fact) { + if (fact == null) + _decoderFactory = _db.getMongo().getMongoOptions().dbDecoderFactory; + else + _decoderFactory = fact; + } + + public DBDecoderFactory getDBDecoderFactory() { + return _decoderFactory; + } + + public void setDBEncoderFactory(DBEncoderFactory fact) { + if (fact == null) + _encoderFactory = _db.getMongo().getMongoOptions().dbEncoderFactory; + else + _encoderFactory = fact; + } + + public DBEncoderFactory getDBEncoderFactory() { + return _encoderFactory; + } + + final DB _db; + + final protected String _name; + final protected String _fullName; + + protected List _hintFields; + private WriteConcern _concern = null; + private ReadPreference _readPref = null; + private DBDecoderFactory _decoderFactory; + private DBEncoderFactory _encoderFactory; + final Bytes.OptionHolder _options; + + protected Class _objectClass = null; + private Map 
_internalClass = Collections.synchronizedMap( new HashMap() ); + private ReflectionDBObject.JavaWrapper _wrapper = null; + + final private Set _createdIndexes = new HashSet(); + +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/BaseMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/BaseMongoTest.java new file mode 100644 index 0000000..493e4b3 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/BaseMongoTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk; + +import java.io.InputStream; +import java.util.Properties; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.apache.log4j.BasicConfigurator; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + + +/** + * Base class for {@code MongoDB} tests. 
+ * + * @author actualPaths = result.getAffectedPaths(); + Assert.assertEquals(new HashSet(Arrays.asList(expectedPaths)), new HashSet(actualPaths)); + } + + public static void assertCommitExists(Commit commit) { + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject query = QueryBuilder.start(CommitMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(commit.getRevisionId())).and(CommitMongo.KEY_MESSAGE) + .is(commit.getMessage()).and(CommitMongo.KEY_DIFF).is(commit.getDiff()).and(CommitMongo.KEY_PATH) + .is(commit.getPath()).and(CommitMongo.KEY_FAILED).notEquals(Boolean.TRUE).get(); + CommitMongo result = (CommitMongo) commitCollection.findOne(query); + Assert.assertNotNull(result); + } + + public static void assertHeadRevision(long revisionId) { + DBCollection headCollection = mongoConnection.getHeadCollection(); + HeadMongo result = (HeadMongo) headCollection.findOne(); + Assert.assertEquals(revisionId, result.getHeadRevisionId()); + } + + public static void assertNextRevision(long revisionId) { + DBCollection headCollection = mongoConnection.getHeadCollection(); + HeadMongo result = (HeadMongo) headCollection.findOne(); + Assert.assertEquals(revisionId, result.getNextRevisionId()); + } + + public static void assertNodeRevisionId(String path, String revisionId, boolean exists) { + DBCollection nodeCollection = mongoConnection.getNodeCollection(); + DBObject query = QueryBuilder.start(NodeMongo.KEY_PATH).is(path).and(NodeMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(revisionId)).get(); + NodeMongo nodeMongo = (NodeMongo) nodeCollection.findOne(query); + + if (exists) { + Assert.assertNotNull(nodeMongo); + } else { + Assert.assertNull(nodeMongo); + } + } + + public static void assertNodesExist(String parentPath, Node expected) { + DBCollection nodeCollection = mongoConnection.getNodeCollection(); + QueryBuilder qb = QueryBuilder.start(NodeMongo.KEY_PATH).is(expected.getPath()).and(NodeMongo.KEY_REVISION_ID) + 
.is(MongoUtil.toMongoRepresentation(expected.getRevisionId())); + Map properties = expected.getProperties(); + if (properties != null) { + for (Map.Entry entry : properties.entrySet()) { + qb.and(NodeMongo.KEY_PROPERTIES + "." + entry.getKey()).is(entry.getValue()); + } + } + + DBObject query = qb.get(); + + NodeMongo nodeMongo = (NodeMongo) nodeCollection.findOne(query); + Assert.assertNotNull(nodeMongo); + + Set children = expected.getChildren(); + if (children != null) { + List childNames = nodeMongo.getChildren(); + Assert.assertNotNull(childNames); + Assert.assertEquals(children.size(), childNames.size()); + Assert.assertEquals(children.size(), new HashSet(childNames).size()); + for (Node child : children) { + assertNodesExist(expected.getPath(), child); + Assert.assertTrue(childNames.contains(child.getName())); + } + } else { + Assert.assertNull(nodeMongo.getChildren()); + } + } + + static void setMongoConnection(MongoConnection mongoConnection) { + // must be set prior to using this class. + MongoAssert.mongoConnection = mongoConnection; + } + + private MongoAssert() { + // no instantiation + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongoTest.java new file mode 100644 index 0000000..08e2301 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/CommitCommandMongoTest.java @@ -0,0 +1,377 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import static org.junit.Assert.fail; + +import java.util.LinkedList; +import java.util.List; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.MongoAssert; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.builder.NodeBuilder; +import org.apache.jackrabbit.mongomk.impl.model.AddNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.AddPropertyInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.CommitImpl; +import org.apache.jackrabbit.mongomk.impl.model.RemoveNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +/** + * @author instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "1")); + + Commit commit = new CommitImpl("This is the 1st commit", "/", "+1 : {}", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String firstRevisionId = command.execute(); + + instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "2")); + + commit = new CommitImpl("This is the 2nd commit", "/", "+2 : {}", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + String secondRevisionId = command.execute(); + + instructions = new LinkedList(); + 
instructions.add(new AddNodeInstructionImpl("/", "3")); + + commit = new CommitImpl("This is the 3rd commit", "/", "+3 : {}", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + String thirdRevisionId = command.execute(); + + MongoAssert.assertNodesExist("", NodeBuilder.build(String.format( + "{ \"/#%3$s\" : { \"1#%1$s\" : { } , \"2#%2$s\" : { } , \"3#%3$s\" : { } } }", + firstRevisionId, secondRevisionId, thirdRevisionId))); + } + + @Test + public void testCommitAddNodes() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + instructions.add(new AddNodeInstructionImpl("/a", "c")); + + Commit commit = new CommitImpl("This is a simple commit", "/", "+a : { b : {} , c : {} }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + Assert.assertNotNull(revisionId); + MongoAssert.assertNodesExist("", NodeBuilder.build(String.format( + "{ \"/#%1$s\" : { \"a#%1$s\" : { \"b#%1$s\" : {} , \"c#%1$s\" : {} } } }", revisionId))); + + MongoAssert.assertCommitExists(commit); + MongoAssert.assertCommitContainsAffectedPaths(commit.getRevisionId(), "/", "/a", "/a/b", "/a/c"); + MongoAssert.assertHeadRevision(1); + MongoAssert.assertNextRevision(2); + } + + @Test + public void testCommitAddNodesAndPropertiesOutOfOrder() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddPropertyInstructionImpl("/a", "key1", "value1")); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + instructions.add(new AddPropertyInstructionImpl("/a/b", "key2", "value2")); + instructions.add(new AddPropertyInstructionImpl("/a/c", "key3", "value3")); + instructions.add(new AddNodeInstructionImpl("/a", "c")); + + Commit commit = new CommitImpl("This is a simple commit", "/", + "+a 
: { \"key1\" : \"value1\" , \"key2\" : \"value2\" , \"key3\" : \"value3\" }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + Assert.assertNotNull(revisionId); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"key1\" : \"value1\", \"b#%1$s\" : { \"key2\" : \"value2\" } , \"c#%1$s\" : { \"key3\" : \"value3\" } } } }", + revisionId))); + + MongoAssert.assertCommitExists(commit); + MongoAssert.assertCommitContainsAffectedPaths(commit.getRevisionId(), "/", "/a", "/a/b", "/a/c"); + MongoAssert.assertHeadRevision(1); + MongoAssert.assertNextRevision(2); + } + + @Test + public void testCommitAddNodesWhichAlreadyExist() throws Exception { + SimpleNodeScenario scenario1 = new SimpleNodeScenario(mongoConnection); + scenario1.create(); + + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddPropertyInstructionImpl("/a", "key1", "value1")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + instructions.add(new AddPropertyInstructionImpl("/a/b", "key2", "value2")); + instructions.add(new AddNodeInstructionImpl("/a", "c")); + instructions.add(new AddPropertyInstructionImpl("/a/c", "key3", "value3")); + + Commit commit = new CommitImpl("This is a simple commit", "/", + "+a : { \"key1\" : \"value1\" , \"key2\" : \"value2\" , \"key3\" : \"value3\" }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + Assert.assertNotNull(revisionId); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 , \"key1\" : \"value1\", \"b#%1$s\" : { \"string\" : \"foo\" , \"key2\" : \"value2\" } , \"c#%1$s\" : { \"bool\" : true , \"key3\" : \"value3\" } } } }", + revisionId))); + + 
MongoAssert.assertCommitExists(commit); + // MongoAssert.assertCommitContainsAffectedPaths(commit.getRevisionId(), "/a", "/a/b", "/a/c"); TODO think about + // whether / should really be included since it already contained /a + MongoAssert.assertCommitContainsAffectedPaths(commit.getRevisionId(), "/", "/a", "/a/b", "/a/c"); + } + + @Test + public void testCommitAndMergeNodes() throws Exception { + SimpleNodeScenario scenario1 = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario1.create(); + String secondRevisionId = scenario1.update_A_and_add_D_and_E(); + + SimpleNodeScenario scenario2 = new SimpleNodeScenario(mongoConnection); + String thirdRevisionId = scenario2.create(); + + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 , \"b#%1$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true } } } }", + firstRevisionId))); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\" , \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId))); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%3$s\" : { \"a#%3$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%3$s\" : { \"string\" : \"foo\" , \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%3$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId, + thirdRevisionId))); + } + + @Test + public void testCommitContainsAllAffectedNodes() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + + 
MongoAssert.assertCommitContainsAffectedPaths(firstRevisionId, "/", "/a", "/a/b", "/a/c"); + MongoAssert.assertCommitContainsAffectedPaths(secondRevisionId, "/a", "/a/b", "/a/d", "/a/b/e"); + } + + @Test + public void testRemoveNode() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + instructions.add(new AddNodeInstructionImpl("/a", "c")); + + Commit commit = new CommitImpl("This is a simple commit", "/", "+a : { b : {} , c : {} }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + Assert.assertNotNull(revisionId); + + instructions = new LinkedList(); + instructions.add(new RemoveNodeInstructionImpl("/", "a")); + + commit = new CommitImpl("This is a simple commit", "/", "-a", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + revisionId = command.execute(); + Assert.assertNotNull(revisionId); + + MongoAssert.assertNodesExist("", + NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", revisionId))); + + MongoAssert.assertCommitExists(commit); + MongoAssert.assertCommitContainsAffectedPaths(commit.getRevisionId(), "/"); + } + + @Test + @Ignore // FIXME + public void testRemoveNonExistentNode() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + + Commit commit = new CommitImpl("Add nodes", "/", "+a : { b : {} }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + command.execute(); + + instructions = new LinkedList(); + instructions.add(new RemoveNodeInstructionImpl("/a", "c")); + + commit = new CommitImpl("Non-existent node delete", "/a", "-c", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + try { + command.execute(); + 
fail("Exception expected"); + } catch (Exception expected) { + + } + } + + @Test + public void testExistingParentContainsChildren() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/", "b")); + instructions.add(new AddNodeInstructionImpl("/", "c")); + + Commit commit = new CommitImpl("This is a simple commit", "/", "+a : { b : {} , c : {} }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + Assert.assertNotNull(revisionId); + MongoAssert.assertNodesExist("", NodeBuilder.build(String.format( + "{ \"/#%1$s\" : { \"a#%1$s\" : {}, \"b#%1$s\" : {} , \"c#%1$s\" : {} } }", revisionId))); + + GetNodesCommandMongo command2 = new GetNodesCommandMongo(mongoConnection, "/", revisionId, 0); + Node rootOfPath = command2.execute(); + Assert.assertEquals(3, rootOfPath.getChildCount()); + } + + @Test + public void testMergePropertiesAndChildren_noneExistedAndNewAdded() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddPropertyInstructionImpl("/a", "key1", "value1")); + instructions.add(new AddPropertyInstructionImpl("/a", "key2", "value2")); + instructions.add(new AddPropertyInstructionImpl("/a", "key3", "value3")); + + Commit commit = new CommitImpl("This is a simple commit", "/", + "+a : { \"key1\" : \"value1\" , \"key2\" : \"value2\" , \"key3\" : \"value3\" }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + MongoAssert.assertNodesExist("", NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", "0"))); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"key1\" : \"value1\", \"key2\" : \"value2\", \"key3\" : \"value3\" } } }", + 
revisionId))); + } + + @Test + public void testMergePropertiesAndChildren_someExistedAndNewAdded() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddPropertyInstructionImpl("/a", "existed_key1", "value1")); + instructions.add(new AddPropertyInstructionImpl("/a", "existed_key2", "value2")); + instructions.add(new AddPropertyInstructionImpl("/a", "existed_key3", "value3")); + + Commit commit = new CommitImpl( + "This is a simple commit", + "/", + "+a : { \"existed_key1\" : \"value1\" , \"existed_key2\" : \"value2\" , \"existed_key3\" : \"value3\" }", + instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddPropertyInstructionImpl("/a", "key1", "value1")); + instructions.add(new AddPropertyInstructionImpl("/a", "key2", "value2")); + instructions.add(new AddPropertyInstructionImpl("/a", "key3", "value3")); + + commit = new CommitImpl("This is a simple commit", "/", + "+a : { \"key1\" : \"value1\" , \"key2\" : \"value2\" , \"key3\" : \"value3\" }", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + revisionId = command.execute(); + + MongoAssert.assertNodesExist("", NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", "0"))); + MongoAssert + .assertNodesExist( + "", + NodeBuilder.build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"existed_key1\" : \"value1\", \"existed_key2\" : \"value2\", \"existed_key3\" : \"value3\", \"key1\" : \"value1\", \"key2\" : \"value2\", \"key3\" : \"value3\" } } }", + revisionId))); + } + + @Test + public void testNoOtherNodesTouched() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/", "b")); + 
instructions.add(new AddNodeInstructionImpl("/", "c")); + + Commit commit = new CommitImpl("This is a simple commit", "/", "+a : { b : {} , c : {} }", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String firstRevisionId = command.execute(); + + instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/a", "d")); + instructions.add(new AddNodeInstructionImpl("/a", "e")); + + commit = new CommitImpl("This is a simple commit", "/a", "+d: {} \n+e : {}", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + String secondRevisionId = command.execute(); + + MongoAssert.assertNodeRevisionId("/", firstRevisionId, true); + MongoAssert.assertNodeRevisionId("/a", firstRevisionId, true); + MongoAssert.assertNodeRevisionId("/b", firstRevisionId, true); + MongoAssert.assertNodeRevisionId("/c", firstRevisionId, true); + MongoAssert.assertNodeRevisionId("/a/d", firstRevisionId, false); + MongoAssert.assertNodeRevisionId("/a/e", firstRevisionId, false); + + MongoAssert.assertNodeRevisionId("/", secondRevisionId, false); + MongoAssert.assertNodeRevisionId("/a", secondRevisionId, true); + MongoAssert.assertNodeRevisionId("/b", secondRevisionId, false); + MongoAssert.assertNodeRevisionId("/c", secondRevisionId, false); + MongoAssert.assertNodeRevisionId("/a/d", secondRevisionId, true); + MongoAssert.assertNodeRevisionId("/a/e", secondRevisionId, true); + } + + @Test + @Ignore /// FIXME + public void testRootNodeHasEmptyRootPath() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("", "/")); + + Commit commit = new CommitImpl("This is the root commit", "", "+/ : {}", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + Assert.assertNotNull(revisionId); + MongoAssert.assertNodesExist("", + NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", 
revisionId))); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ConcurrentCommitCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ConcurrentCommitCommandMongoTest.java new file mode 100644 index 0000000..ee478d0 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ConcurrentCommitCommandMongoTest.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.command; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.api.command.CommandExecutor; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.command.CommandExecutorImpl; +import org.apache.jackrabbit.mongomk.impl.model.AddNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.CommitImpl; +import org.junit.Assert; +import org.junit.Test; + +/** + * @author commands = new ArrayList(numOfConcurrentThreads); + for (int i = 0; i < numOfConcurrentThreads; ++i) { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", String.valueOf(i))); + Commit commit = new CommitImpl("This is a concurrent commit", "/", "+" + i + " : {}", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit) { + @Override + protected boolean saveAndSetHeadRevision() throws Exception { + try { + synchronized (waitLock) { + waitLock.wait(); + } + + return super.saveAndSetHeadRevision(); + } catch (InterruptedException e) { + e.printStackTrace(); + return false; + } + }; + }; + commands.add(command); + } + + // execute the commands + final CommandExecutor commandExecutor = new CommandExecutorImpl(); + ExecutorService executorService = Executors.newFixedThreadPool(numOfConcurrentThreads); + final List revisionIds = new LinkedList(); + for (int i = 0; i < numOfConcurrentThreads; ++i) { + final CommitCommandMongo command = commands.get(i); + Runnable runnable = new Runnable() { + + @Override + public void run() { + try { + 
String revisionId = commandExecutor.execute(command); + revisionIds.add(revisionId); + } catch (Exception e) { + revisionIds.add(null); + } + } + }; + executorService.execute(runnable); + } + + // notify the wait lock to execute the command concurrently + do { + Thread.sleep(1500); + synchronized (waitLock) { + waitLock.notifyAll(); + } + } while (revisionIds.size() < numOfConcurrentThreads); + + // verify the result by sorting the revision ids and verifying that all children are contained in the next + // revision + Collections.sort(revisionIds, new Comparator() { + @Override + public int compare(String o1, String o2) { + return Long.valueOf(o1).compareTo(Long.valueOf(o2)); + } + }); + List lastChildren = new LinkedList(); + for (int i = 0; i < numOfConcurrentThreads; ++i) { + String revisionId = revisionIds.get(i); + + GetNodesCommandMongo command2 = new GetNodesCommandMongo(mongoConnection, "/", revisionId, 0); + Node root = command2.execute(); + Set children = root.getChildren(); + for (String lastChild : lastChildren) { + boolean contained = false; + for (Node childNode : children) { + if (childNode.getName().equals(lastChild)) { + contained = true; + break; + } + } + Assert.assertTrue(contained); + } + lastChildren.clear(); + for (Node childNode : children) { + lastChildren.add(childNode.getName()); + } + } + + // TODO Assert the number of commits + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongoTest.java new file mode 100644 index 0000000..facf039 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetBlobLengthCommandMongoTest.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.command.GetBlobLengthCommandMongo; +import org.junit.Test; + +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSInputFile; + +public class GetBlobLengthCommandMongoTest extends BaseMongoTest { + + @Test + public void testGetBlobLength() throws Exception { + int blobLength = 100; + String blobId = createAndWriteBlob(blobLength); + + GetBlobLengthCommandMongo command = new GetBlobLengthCommandMongo(mongoConnection, blobId); + long length = command.execute(); + assertEquals(blobLength, length); + } + + @Test + public void testNonExistantBlobLength() throws Exception { + GetBlobLengthCommandMongo command = new GetBlobLengthCommandMongo(mongoConnection, + "nonExistantBlobId"); + try { + command.execute(); + fail("Exception expected"); + } catch (Exception expected) { + } + } + + private String createAndWriteBlob(int blobLength) { + byte[] blob = createBlob(blobLength); + return writeBlob(blob); + } + + private byte[] createBlob(int blobLength) { + byte[] blob = new byte[blobLength]; + for (int i = 0; i < blob.length; i++) { + blob[i] = (byte)i; + } + return blob; + } + + private String writeBlob(byte[] blob) { 
+ GridFS gridFS = mongoConnection.getGridFS(); + GridFSInputFile gridFSInputFile = gridFS.createFile(new ByteArrayInputStream(blob), true); + gridFSInputFile.save(); + return gridFSInputFile.getMD5(); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongoTest.java new file mode 100644 index 0000000..ded521e --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetHeadRevisionCommandMongoTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.command; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.command.GetHeadRevisionCommandMongo; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.junit.Test; + + +@SuppressWarnings("javadoc") +public class GetHeadRevisionCommandMongoTest extends BaseMongoTest { + + @Test + public void testGeadHeadRevisionSimple() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String revisionId = scenario.create(); + + GetHeadRevisionCommandMongo command = new GetHeadRevisionCommandMongo(mongoConnection); + String revisionId2 = command.execute(); + assertTrue(revisionId.equals(revisionId2)); + + scenario.delete_A(); + String revisionId3 = command.execute(); + assertFalse(revisionId3.equals(revisionId2)); + }} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetNodesCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetNodesCommandMongoTest.java new file mode 100644 index 0000000..08135c7 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/GetNodesCommandMongoTest.java @@ -0,0 +1,300 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import java.util.Arrays; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.NodeAssert; +import org.apache.jackrabbit.mongomk.impl.builder.NodeBuilder; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.easymock.EasyMock; +import org.junit.Ignore; +import org.junit.Test; + +import com.mongodb.BasicDBList; +import com.mongodb.BasicDBObject; +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBObject; + +/** + * @author expectedNodeMongos = NodeMongo.fromNodes(expected.getDescendants(true)); + for (NodeMongo nodeMongo : expectedNodeMongos) { + + BasicDBObject groupDbObject = new BasicDBObject(); + groupDbObject.put("result", nodeMongo); + if (!nodeMongo.getPath().equals(missingPath)) { + results1.add(groupDbObject); + } + results2.add(groupDbObject); + } + + DBCollection mockNodeCollection = EasyMock.createMock(DBCollection.class); + EasyMock.expect( + mockNodeCollection.group(EasyMock.anyObject(DBObject.class), EasyMock.anyObject(DBObject.class), + EasyMock.anyObject(DBObject.class), EasyMock.anyObject(String.class))).andReturn(results1) + .once(); + EasyMock.expect( + mockNodeCollection.group(EasyMock.anyObject(DBObject.class), 
EasyMock.anyObject(DBObject.class), + EasyMock.anyObject(DBObject.class), EasyMock.anyObject(String.class))).andReturn(results2) + .once(); + EasyMock.replay(mockNodeCollection); + + CommitMongo firstCommit = new CommitMongo(); + firstCommit.setAffectedPaths(Arrays.asList(new String[] { "/", "/a", "/a/b", "/a/c" })); + firstCommit.setRevisionId(1L); + + CommitMongo secondCommit = new CommitMongo(); + secondCommit.setAffectedPaths(Arrays.asList(new String[] { "/a", "/a/d", "/a/b/e" })); + secondCommit.setRevisionId(2L); + + DBCursor mockDbCursor = EasyMock.createMock(DBCursor.class); + EasyMock.expect(mockDbCursor.sort(EasyMock.anyObject(DBObject.class))).andReturn(mockDbCursor); + EasyMock.expect(mockDbCursor.limit(EasyMock.anyInt())).andReturn(mockDbCursor); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(true).once(); + EasyMock.expect(mockDbCursor.next()).andReturn(firstCommit).once(); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(true).once(); + EasyMock.expect(mockDbCursor.next()).andReturn(secondCommit).once(); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(false).once(); + EasyMock.replay(mockDbCursor); + + DBCollection mockCommitCollection = EasyMock.createMock(DBCollection.class); + EasyMock.expect(mockCommitCollection.find(EasyMock.anyObject(DBObject.class))).andReturn(mockDbCursor); + EasyMock.replay(mockCommitCollection); + + MongoConnection mockConnection = EasyMock.createMock(MongoConnection.class); + EasyMock.expect(mockConnection.getNodeCollection()).andReturn(mockNodeCollection).times(2); + EasyMock.expect(mockConnection.getCommitCollection()).andReturn(mockCommitCollection); + EasyMock.replay(mockConnection); + + return mockConnection; + } + + private MongoConnection createStaleNodeScenario(Node expected, String stalePath) { + BasicDBList results1 = new BasicDBList(); + BasicDBList results2 = new BasicDBList(); + + Set expectedNodeMongos = NodeMongo.fromNodes(expected.getDescendants(true)); + for (NodeMongo nodeMongo : 
expectedNodeMongos) { + BasicDBObject groupDbObject = new BasicDBObject(); + groupDbObject.put("result", nodeMongo); + results2.add(groupDbObject); + + if (nodeMongo.getPath().equals(stalePath)) { + NodeMongo nodeMongoStale = new NodeMongo(); + nodeMongoStale.putAll(nodeMongo.toMap()); + nodeMongoStale.setRevisionId(1L); + } + + results1.add(groupDbObject); + } + + DBCollection mockNodeCollection = EasyMock.createMock(DBCollection.class); + EasyMock.expect( + mockNodeCollection.group(EasyMock.anyObject(DBObject.class), EasyMock.anyObject(DBObject.class), + EasyMock.anyObject(DBObject.class), EasyMock.anyObject(String.class))).andReturn(results1) + .once(); + EasyMock.expect( + mockNodeCollection.group(EasyMock.anyObject(DBObject.class), EasyMock.anyObject(DBObject.class), + EasyMock.anyObject(DBObject.class), EasyMock.anyObject(String.class))).andReturn(results2) + .once(); + EasyMock.replay(mockNodeCollection); + + CommitMongo firstCommit = new CommitMongo(); + firstCommit.setAffectedPaths(Arrays.asList(new String[] { "/", "/a", "/a/b", "/a/c" })); + firstCommit.setRevisionId(1L); + + CommitMongo secondCommit = new CommitMongo(); + secondCommit.setAffectedPaths(Arrays.asList(new String[] { "/a", "/a/d", "/a/b/e" })); + secondCommit.setRevisionId(2L); + + DBCursor mockDbCursor = EasyMock.createMock(DBCursor.class); + EasyMock.expect(mockDbCursor.sort(EasyMock.anyObject(DBObject.class))).andReturn(mockDbCursor); + EasyMock.expect(mockDbCursor.limit(EasyMock.anyInt())).andReturn(mockDbCursor); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(true).once(); + EasyMock.expect(mockDbCursor.next()).andReturn(firstCommit).once(); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(true).once(); + EasyMock.expect(mockDbCursor.next()).andReturn(secondCommit).once(); + EasyMock.expect(mockDbCursor.hasNext()).andReturn(false).once(); + EasyMock.replay(mockDbCursor); + + DBCollection mockCommitCollection = EasyMock.createMock(DBCollection.class); + 
EasyMock.expect(mockCommitCollection.find(EasyMock.anyObject(DBObject.class))).andReturn(mockDbCursor); + EasyMock.replay(mockCommitCollection); + + MongoConnection mockConnection = EasyMock.createMock(MongoConnection.class); + EasyMock.expect(mockConnection.getNodeCollection()).andReturn(mockNodeCollection).times(2); + EasyMock.expect(mockConnection.getCommitCollection()).andReturn(mockCommitCollection); + EasyMock.replay(mockConnection); + + return mockConnection; + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongoTest.java new file mode 100644 index 0000000..b2ab00f --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/NodeExistsCommandMongoTest.java @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.command; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.LinkedList; +import java.util.List; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.impl.model.AddNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.CommitImpl; +import org.apache.jackrabbit.mongomk.impl.model.RemoveNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.junit.Test; + +@SuppressWarnings("javadoc") +public class NodeExistsCommandMongoTest extends BaseMongoTest { + + @Test + public void simple() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String revisionId = scenario.create(); + + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a", revisionId); + boolean exists = command.execute(); + assertTrue(exists); + + command = new NodeExistsCommandMongo(mongoConnection, "/a/b", + revisionId); + exists = command.execute(); + assertTrue(exists); + + revisionId = scenario.delete_A(); + + command = new NodeExistsCommandMongo(mongoConnection, "/a", revisionId); + exists = command.execute(); + assertFalse(exists); + + command = new NodeExistsCommandMongo(mongoConnection, "/a/b", + revisionId); + exists = command.execute(); + assertFalse(exists); + } + + @Test + public void withoutRevisionId() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a", null /* revisionId */); + boolean exists = command.execute(); + assertTrue(exists); + + scenario.delete_A(); + + command = new NodeExistsCommandMongo(mongoConnection, "/a", 
null /* revisionId */); + exists = command.execute(); + assertFalse(exists); + } + + @Test + public void withInvalidRevisionId() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a", "123456789"); + try { + command.execute(); + fail("Expected: Invalid revision id exception"); + } catch (Exception expected) { + } + } + + @Test + public void parentDelete() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a/b", null); + boolean exists = command.execute(); + assertTrue(exists); + + scenario.delete_A(); + command = new NodeExistsCommandMongo(mongoConnection, "/a/b", null); + exists = command.execute(); + assertFalse(exists); + } + + @Test + public void grandParentDelete() throws Exception { + // Add a->b->c->d. + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/", "a")); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + instructions.add(new AddNodeInstructionImpl("/a/b", "c")); + instructions.add(new AddNodeInstructionImpl("/a/b/c", "d")); + + Commit commit = new CommitImpl("Add nodes", "/", "TODO", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, + commit); + command.execute(); + + // Remove b. + instructions = new LinkedList(); + instructions.add(new RemoveNodeInstructionImpl("/a", "b")); + commit = new CommitImpl("Delete /b", "/a", "-b", instructions); + command = new CommitCommandMongo(mongoConnection, commit); + command.execute(); + + // Check for d. 
+ NodeExistsCommandMongo existsCommand = new NodeExistsCommandMongo( + mongoConnection, "/a/b/c/d", null); + boolean exists = existsCommand.execute(); + assertFalse(exists); + } + + @Test + public void existsInOldRevNotInNewRev() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String rev1 = scenario.create(); + String rev2 = scenario.delete_A(); + + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a", rev1); + boolean exists = command.execute(); + assertTrue(exists); + + command = new NodeExistsCommandMongo(mongoConnection, "/a", rev2); + exists = command.execute(); + assertFalse(exists); + } + + @Test + public void siblingDelete() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + scenario.delete_B(); + NodeExistsCommandMongo command = new NodeExistsCommandMongo( + mongoConnection, "/a/b", null); + boolean exists = command.execute(); + assertFalse(exists); + + command = new NodeExistsCommandMongo(mongoConnection, "/a/c", null); + exists = command.execute(); + assertTrue(exists); + } + + @Test + public void testNodeNotFound() throws Exception { + + // adds nodes /a,/a/b,/a/b/c , checks if node a exists + List instructions = new LinkedList(); + + // commit node /a + instructions.add(new AddNodeInstructionImpl("/", "a")); + Commit commit1 = new CommitImpl("/", "+a : {}", "Add node a", + instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, + commit1); + command.execute(); + + // commit node /a/b + instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/a", "b")); + Commit commit2 = new CommitImpl("/a", "+b : {}", "Add node a/b", + instructions); + command = new CommitCommandMongo(mongoConnection, commit2); + command.execute(); + + // commit node /a/b/c + instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/a/b", "c")); + Commit commit3 = new 
CommitImpl("a/b", "+c : {}", "Add node a/b/c", + instructions); + command = new CommitCommandMongo(mongoConnection, commit3); + command.execute(); + + // verify if node a is visible in the head revision + NodeExistsCommandMongo isNodeVisible = new NodeExistsCommandMongo( + mongoConnection, "/a", null); + boolean exists = isNodeVisible.execute(); + assertTrue("The node a is not found in the head revision!", exists); + + } + + @Test + public void testTreeDepth() throws Exception { + + String path = "/"; + List instructions = new LinkedList(); + + for (int i = 0; i < 1000; i++) { + instructions.clear(); + instructions.add(new AddNodeInstructionImpl(path, "N" + i)); + Commit commit1 = new CommitImpl(path, "+N" + i + " : {}", + "Add node N" + i, instructions); + CommitCommandMongo command = new CommitCommandMongo( + mongoConnection, commit1); + command.execute(); + path = (path.endsWith("/")) ? (path = path + "N" + i) + : (path = path + "/N" + i); + //System.out.println("*********" + path.length() + "*****"); + } + + } + +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ReadBlobCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ReadBlobCommandMongoTest.java new file mode 100644 index 0000000..c82e9a9 --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/ReadBlobCommandMongoTest.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.command; + +import java.io.ByteArrayInputStream; +import java.util.Arrays; + +import junit.framework.Assert; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.command.ReadBlobCommandMongo; +import org.junit.Test; + +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSInputFile; + +public class ReadBlobCommandMongoTest extends BaseMongoTest { + + private byte[] blob; + private String blobId; + + @Override + public void setUp() throws Exception { + super.setUp(); + + blob = new byte[100]; + for (int i = 0; i < blob.length; i++) { + blob[i] = (byte) i; + } + ByteArrayInputStream is = new ByteArrayInputStream(blob); + GridFS gridFS = mongoConnection.getGridFS(); + GridFSInputFile gridFSInputFile = gridFS.createFile(is, true); + gridFSInputFile.save(); + blobId = gridFSInputFile.getMD5(); + } + + @Test + public void testReadBlobComplete() throws Exception { + byte[] buffer = new byte[blob.length]; + ReadBlobCommandMongo command = new ReadBlobCommandMongo(mongoConnection, blobId, 0, buffer, 0, blob.length); + int totalBytes = command.execute(); + + Assert.assertEquals(blob.length, totalBytes); + Assert.assertTrue(Arrays.equals(blob, buffer)); + } + + @Test + public void testReadBlobRangeFromEnd() throws Exception { + byte[] buffer = new byte[blob.length / 2]; + ReadBlobCommandMongo command = new ReadBlobCommandMongo(mongoConnection, blobId, (blob.length / 2) - 1, + buffer, 0, blob.length / 2); + int totalBytes = command.execute(); + + 
Assert.assertEquals(blob.length / 2, totalBytes); + for (int i = 0; i < buffer.length; i++) { + Assert.assertEquals(blob[((blob.length / 2) - 1) + i], buffer[i]); + } + } + + @Test + public void testReadBlobRangeFromStart() throws Exception { + byte[] buffer = new byte[blob.length / 2]; + ReadBlobCommandMongo command = new ReadBlobCommandMongo(mongoConnection, blobId, 0, buffer, 0, + blob.length / 2); + int totalBytes = command.execute(); + + Assert.assertEquals(blob.length / 2, totalBytes); + for (int i = 0; i < buffer.length; i++) { + Assert.assertEquals(blob[i], buffer[i]); + } + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongoTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongoTest.java new file mode 100644 index 0000000..0dafe2d --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/command/WriteBlobCommandMongoTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.command; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Arrays; + +import org.apache.jackrabbit.mk.util.IOUtils; +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.command.WriteBlobCommandMongo; +import org.junit.Test; + +import com.mongodb.BasicDBObject; +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSDBFile; + +public class WriteBlobCommandMongoTest extends BaseMongoTest { + + @Test + public void testWriteBlobComplete() throws Exception { + int blobLength = 100; + byte[] blob = createBlob(blobLength); + + WriteBlobCommandMongo command = new WriteBlobCommandMongo(mongoConnection, + new ByteArrayInputStream(blob)); + String blobId = command.execute(); + assertNotNull(blobId); + + byte[] readBlob = new byte[blobLength]; + readBlob(blobId, readBlob); + assertTrue(Arrays.equals(blob, readBlob)); + } + + private byte[] createBlob(int blobLength) { + byte[] blob = new byte[blobLength]; + for (int i = 0; i < blob.length; i++) { + blob[i] = (byte)i; + } + return blob; + } + + private void readBlob(String blobId, byte[] readBlob) throws IOException { + GridFS gridFS = mongoConnection.getGridFS(); + GridFSDBFile gridFile = gridFS.findOne(new BasicDBObject("md5", blobId)); + IOUtils.readFully(gridFile.getInputStream(), readBlob, 0, readBlob.length); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQueryTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQueryTest.java new file mode 100644 index 0000000..d2b804d --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesByPathAndDepthQueryTest.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.query; + +import java.util.List; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.NodeAssert; +import org.apache.jackrabbit.mongomk.impl.builder.NodeBuilder; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.query.FetchNodesByPathAndDepthQuery; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.junit.Ignore; +import org.junit.Test; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBCollection; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; + +/** + * @author result = query.execute(); + List actuals = NodeMongo.toNode(result); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%0$s\" : { \"a#%3$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%3$s\" : { \"string\" : \"foo\" , \"e#%3$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"d#%3$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId, + thirdRevisionId)); + Set expecteds = 
expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithInvalidLastRevision() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + SimpleNodeScenario scenario2 = new SimpleNodeScenario(mongoConnection); + String thirdRevisionId = scenario.update_A_and_add_D_and_E(); + + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject q = QueryBuilder.start(CommitMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(thirdRevisionId)).get(); + DBObject u = new BasicDBObject(); + u.put("$set", new BasicDBObject(CommitMongo.KEY_FAILED, Boolean.TRUE)); + commitCollection.update(q, u); + + FetchNodesByPathAndDepthQuery query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", thirdRevisionId, + -1); + List result = query.execute(); + List actuals = NodeMongo.toNode(result); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\" , \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithInvalidMiddleRevision() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + SimpleNodeScenario scenario2 = new SimpleNodeScenario(mongoConnection); + String thirdRevisionId = scenario.update_A_and_add_D_and_E(); + + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject q = 
QueryBuilder.start(CommitMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(secondRevisionId)).get(); + DBObject u = new BasicDBObject("$set", new BasicDBObject(CommitMongo.KEY_FAILED, Boolean.TRUE)); + commitCollection.update(q, u); + + q = QueryBuilder.start(CommitMongo.KEY_REVISION_ID).is(MongoUtil.toMongoRepresentation(thirdRevisionId)) + .get(); + u = new BasicDBObject(); + u.put("$set", + new BasicDBObject(CommitMongo.KEY_BASE_REVISION_ID, MongoUtil + .toMongoRepresentation(firstRevisionId))); + commitCollection.update(q, u); + + FetchNodesByPathAndDepthQuery query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", thirdRevisionId, + -1); + List result = query.execute(); + List actuals = NodeMongo.toNode(result); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%3$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%3$s\" : { \"string\" : \"foo\" , \"e#%3$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%3$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId, + thirdRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testSimpleFetchRootAndAllDepths() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + + FetchNodesByPathAndDepthQuery query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", firstRevisionId, + 0); + List result = query.execute(); + List actuals = NodeMongo.toNode(result); + Node expected = NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", firstRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", secondRevisionId, 0); + result = query.execute(); + actuals = 
NodeMongo.toNode(result); + expected = NodeBuilder.build(String.format("{ \"/#%1$s\" : {} }", firstRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", firstRevisionId, 1); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder.build(String.format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 } } }", + firstRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", secondRevisionId, 1); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder.build(String.format( + "{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 } } }", firstRevisionId, + secondRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", firstRevisionId, 2); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1, \"b#%1$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true } } } }", + firstRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", secondRevisionId, 2); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new 
FetchNodesByPathAndDepthQuery(mongoConnection, "/", firstRevisionId, -1); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 , \"b#%1$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true } } } }", + firstRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesByPathAndDepthQuery(mongoConnection, "/", secondRevisionId, -1); + result = query.execute(); + actuals = NodeMongo.toNode(result); + expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\", \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQueryTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQueryTest.java new file mode 100644 index 0000000..1d6c51c --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchNodesForRevisionQueryTest.java @@ -0,0 +1,188 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.query; + +import java.util.List; +import java.util.Set; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.api.model.Node; +import org.apache.jackrabbit.mongomk.impl.NodeAssert; +import org.apache.jackrabbit.mongomk.impl.builder.NodeBuilder; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.model.NodeMongo; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.apache.jackrabbit.mongomk.util.MongoUtil; +import org.junit.Ignore; +import org.junit.Test; + +import com.mongodb.BasicDBObject; +import com.mongodb.DBCollection; +import com.mongodb.DBObject; +import com.mongodb.QueryBuilder; + +/** + * @author nodeMongos = query.execute(); + List actuals = NodeMongo.toNode(nodeMongos); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%3$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%3$s\" : { \"string\" : \"foo\" , \"e#%3$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%3$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId, + thirdRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithInvalidLastRevision() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + 
SimpleNodeScenario scenario2 = new SimpleNodeScenario(mongoConnection); + String thirdRevisionId = scenario.update_A_and_add_D_and_E(); + + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject q = QueryBuilder.start(CommitMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(thirdRevisionId)).get(); + DBObject u = new BasicDBObject(); + u.put("$set", new BasicDBObject(CommitMongo.KEY_FAILED, Boolean.TRUE)); + commitCollection.update(q, u); + + FetchNodesForRevisionQuery query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", + "/a/b", "/a/c", "/a/d", "/a/b/e", "not_existing" }, thirdRevisionId); + List nodeMongos = query.execute(); + List actuals = NodeMongo.toNode(nodeMongos); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\" , \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithInvalidMiddleRevision() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + SimpleNodeScenario scenario2 = new SimpleNodeScenario(mongoConnection); + String thirdRevisionId = scenario.update_A_and_add_D_and_E(); + + DBCollection commitCollection = mongoConnection.getCommitCollection(); + DBObject q = QueryBuilder.start(CommitMongo.KEY_REVISION_ID) + .is(MongoUtil.toMongoRepresentation(secondRevisionId)).get(); + DBObject u = new BasicDBObject("$set", new BasicDBObject(CommitMongo.KEY_FAILED, Boolean.TRUE)); + commitCollection.update(q, u); + + q = 
QueryBuilder.start(CommitMongo.KEY_REVISION_ID).is(MongoUtil.toMongoRepresentation(thirdRevisionId)) + .get(); + u = new BasicDBObject(); + u.put("$set", + new BasicDBObject(CommitMongo.KEY_BASE_REVISION_ID, MongoUtil + .toMongoRepresentation(firstRevisionId))); + commitCollection.update(q, u); + + FetchNodesForRevisionQuery query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", + "/a/b", "/a/c", "/a/d", "/a/b/e", "not_existing" }, thirdRevisionId); + List nodeMongos = query.execute(); + List actuals = NodeMongo.toNode(nodeMongos); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%3$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%3$s\" : { \"string\" : \"foo\" , \"e#%3$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%3$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId, + thirdRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithOneRevision() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String revisionId = scenario.create(); + + FetchNodesForRevisionQuery query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", + "/a/b", "/a/c", "not_existing" }, revisionId); + List nodeMongos = query.execute(); + List actuals = NodeMongo.toNode(nodeMongos); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 , \"b#%1$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true } } } }", + revisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", "not_existing" }, revisionId); + nodeMongos = query.execute(); + actuals = NodeMongo.toNode(nodeMongos); + expected = NodeBuilder.build(String.format("{ 
\"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 } } }", + revisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } + + @Test + public void testFetchWithTwoRevisions() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + String firstRevisionId = scenario.create(); + String secondRevisionId = scenario.update_A_and_add_D_and_E(); + + FetchNodesForRevisionQuery query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", + "/a/b", "/a/c", "/a/d", "/a/b/e", "not_existing" }, firstRevisionId); + List nodeMongos = query.execute(); + List actuals = NodeMongo.toNode(nodeMongos); + Node expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%1$s\" : { \"int\" : 1 , \"b#%1$s\" : { \"string\" : \"foo\" } , \"c#%1$s\" : { \"bool\" : true } } } }", + firstRevisionId)); + Set expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + + query = new FetchNodesForRevisionQuery(mongoConnection, new String[] { "/", "/a", "/a/b", "/a/c", "/a/d", + "/a/b/e", "not_existing" }, secondRevisionId); + nodeMongos = query.execute(); + actuals = NodeMongo.toNode(nodeMongos); + expected = NodeBuilder + .build(String + .format("{ \"/#%1$s\" : { \"a#%2$s\" : { \"int\" : 1 , \"double\" : 0.123 , \"b#%2$s\" : { \"string\" : \"foo\" , \"e#%2$s\" : { \"array\" : [ 123, null, 123.456, \"for:bar\", true ] } } , \"c#%1$s\" : { \"bool\" : true }, \"d#%2$s\" : { \"null\" : null } } } }", + firstRevisionId, secondRevisionId)); + expecteds = expected.getDescendants(true); + NodeAssert.assertEquals(expecteds, actuals); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchValidCommitsQueryTest.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchValidCommitsQueryTest.java new file mode 100644 index 0000000..a183b8c --- /dev/null +++ 
b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/query/FetchValidCommitsQueryTest.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.mongomk.query; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.List; + +import org.apache.jackrabbit.mongomk.BaseMongoTest; +import org.apache.jackrabbit.mongomk.model.CommitMongo; +import org.apache.jackrabbit.mongomk.query.FetchValidCommitsQuery; +import org.apache.jackrabbit.mongomk.scenario.SimpleNodeScenario; +import org.junit.Test; + + +public class FetchValidCommitsQueryTest extends BaseMongoTest { + + private static final int MIN_COMMITS = 1; + private static final int SIMPLE_SCENARIO_COMMITS = MIN_COMMITS + 1; + + @Test + public void simple() throws Exception { + FetchValidCommitsQuery query = new FetchValidCommitsQuery(mongoConnection, + String.valueOf(Integer.MAX_VALUE) /*maxRevisionId*/); + List commits = query.execute(); + assertEquals(MIN_COMMITS, commits.size()); + + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + commits = query.execute(); + assertEquals(SIMPLE_SCENARIO_COMMITS, commits.size()); + + int 
numberOfChildren = 3; + scenario.addChildrenToA(numberOfChildren); + commits = query.execute(); + assertEquals(SIMPLE_SCENARIO_COMMITS + numberOfChildren, commits.size()); + } + + @Test + public void revisionId() throws Exception { + FetchValidCommitsQuery query = new FetchValidCommitsQuery(mongoConnection, + String.valueOf(Integer.MAX_VALUE) /*maxRevisionId*/); + List commits = query.execute(); + CommitMongo commit0 = commits.get(0); + + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + commits = query.execute(); + CommitMongo commit1 = commits.get(0); + assertTrue(commit0.getRevisionId() < commit1.getRevisionId()); + + int numberOfChildren = 3; + scenario.addChildrenToA(numberOfChildren); + commits = query.execute(); + CommitMongo commit2 = commits.get(0); + assertTrue(commit1.getRevisionId() < commit2.getRevisionId()); + } + + @Test + public void time() throws Exception { + FetchValidCommitsQuery query = new FetchValidCommitsQuery(mongoConnection, + String.valueOf(Integer.MAX_VALUE) /*maxRevisionId*/); + List commits = query.execute(); + CommitMongo commit0 = commits.get(0); + + Thread.sleep(1000); + + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + commits = query.execute(); + CommitMongo commit1 = commits.get(0); + assertTrue(commit0.getTimestamp() < commit1.getTimestamp()); + + Thread.sleep(1000); + + int numberOfChildren = 3; + scenario.addChildrenToA(numberOfChildren); + commits = query.execute(); + CommitMongo commit2 = commits.get(0); + assertTrue(commit1.getTimestamp() < commit2.getTimestamp()); + } + + @Test + public void maxEntriesDefaultLimitless() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + int numberOfChildren = 2; + scenario.addChildrenToA(numberOfChildren); + + int maxEntries = 0; + FetchValidCommitsQuery query = new FetchValidCommitsQuery(mongoConnection, maxEntries); + List commits = 
query.execute(); + assertEquals(SIMPLE_SCENARIO_COMMITS + numberOfChildren, commits.size()); + } + + @Test + public void maxEntries() throws Exception { + SimpleNodeScenario scenario = new SimpleNodeScenario(mongoConnection); + scenario.create(); + + int numberOfChildren = 2; + scenario.addChildrenToA(numberOfChildren); + + int maxEntries = 2; + FetchValidCommitsQuery query = new FetchValidCommitsQuery(mongoConnection, maxEntries); + List commits = query.execute(); + assertEquals(maxEntries, commits.size()); + } +} diff --git a/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/scenario/SimpleNodeScenario.java b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/scenario/SimpleNodeScenario.java new file mode 100644 index 0000000..758dc4b --- /dev/null +++ b/oak-mongomk/src/test/java/org/apache/jackrabbit/mongomk/scenario/SimpleNodeScenario.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mongomk.scenario; + +import java.util.LinkedList; +import java.util.List; + +import org.apache.jackrabbit.mongomk.MongoConnection; +import org.apache.jackrabbit.mongomk.api.model.Commit; +import org.apache.jackrabbit.mongomk.api.model.Instruction; +import org.apache.jackrabbit.mongomk.command.CommitCommandMongo; +import org.apache.jackrabbit.mongomk.impl.model.AddNodeInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.AddPropertyInstructionImpl; +import org.apache.jackrabbit.mongomk.impl.model.CommitImpl; +import org.apache.jackrabbit.mongomk.impl.model.RemoveNodeInstructionImpl; + +/** + * Creates a defined scenario in {@code MongoDB}. + * + * @author instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/a", "child" + i)); + Commit commit = new CommitImpl("Add child" + i, "/a", "TODO", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + revisionId = command.execute(); + } + return revisionId; + } + + /** + * Deletes the a node. + * + *
+     * "-a"
+     * 
+ * + * @return The {@link RevisionId}. + * @throws Exception + * If an error occurred. + */ + public String delete_A() throws Exception { + List instructions = new LinkedList(); + instructions.add(new RemoveNodeInstructionImpl("/", "a")); + + Commit commit = new CommitImpl("This is a commit with deleted /a", "/", "-a", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + return revisionId; + } + + public String delete_B() throws Exception { + List instructions = new LinkedList(); + instructions.add(new RemoveNodeInstructionImpl("/a", "b")); + Commit commit = new CommitImpl("This is a commit with deleted /a/b", "/a", "-b", instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + return command.execute(); + } + + /** + * Updates the following nodes: + * + *
+     * Adds nodes /a/d and /a/b/e; sets property "double" on /a, "null" on /a/d and "array" on /a/b/e.
+     * 
+ * + * @return The {@link RevisionId}. + * @throws Exception + * If an error occurred. + */ + public String update_A_and_add_D_and_E() throws Exception { + List instructions = new LinkedList(); + instructions.add(new AddNodeInstructionImpl("/a", "d")); + instructions.add(new AddNodeInstructionImpl("/a/b", "e")); + instructions.add(new AddPropertyInstructionImpl("/a", "double", 0.123D)); + instructions.add(new AddPropertyInstructionImpl("/a/d", "null", null)); + instructions.add(new AddPropertyInstructionImpl("/a/b/e", "array", new Object[] { 123, null, 123.456D, + "for:bar", Boolean.TRUE })); + + Commit commit = new CommitImpl("This is a commit with updated /a and added /a/d and /a/b/e", "", "TODO", + instructions); + CommitCommandMongo command = new CommitCommandMongo(mongoConnection, commit); + String revisionId = command.execute(); + + return revisionId; + } +} diff --git a/oak-mongomk/src/test/resources/config.cfg b/oak-mongomk/src/test/resources/config.cfg new file mode 100644 index 0000000..39ce31b --- /dev/null +++ b/oak-mongomk/src/test/resources/config.cfg @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains configuration properties for the MongoDB related services. 
These properties are +# required for the SharedCloud MicroKernel integration tests. +# +# Please be cautious with any blanklines and whitespaces! + +# The host of the running mongodb or mongos process +host = 127.0.0.1 + +# The port of the running mongodb or mongos process +port = 27017 + +# The database to use +db = MongoMicroKernelTest \ No newline at end of file diff --git a/pom.xml b/pom.xml index be9ebf4..928d5e8 100644 --- a/pom.xml +++ b/pom.xml @@ -88,6 +88,16 @@ + mongomk + + oak-mongomk-api + oak-mongomk-impl + oak-mongomk + oak-mongomk-test + oak-mongomk-perf + + + apache-release ${user.name} -- 1.7.7.3