diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java
new file mode 100644
index 0000000..b98a3e1
--- /dev/null
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.segment.file.tooling.BasicReadOnlyBlobStore;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+
+import com.google.common.io.Closer;
+
+/**
+ * {@link BlobStoreFactory} that supplies a {@link BasicReadOnlyBlobStore} for
+ * migrating a source repository whose binaries are missing. Enabled by the
+ * {@code --src-missingblobstore} command line option.
+ */
+public class MissingBlobStoreFactory implements BlobStoreFactory {
+
+    @Override
+    public BlobStore create(Closer closer) {
+        return new BasicReadOnlyBlobStore();
+    }
+
+    @Override
+    public String toString() {
+        return "MissingBlobStore";
+    }
+}
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
index 151e4bf..0546d48 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
@@ -50,6 +50,8 @@ public class OptionParserFactory {
 
     public static final String SRC_S3_CONFIG = "src-s3config";
 
+    public static final String SRC_MBS = "src-missingblobstore";
+
     public static final String DST_FDS = "datastore";
 
     public static final String DST_FBS = "fileblobstore";
@@ -101,6 +103,7 @@ public class OptionParserFactory {
         op.accepts(DST_S3, "Datastore directory to be used for the target S3").withRequiredArg().ofType(String.class);
         op.accepts(DST_S3_CONFIG, "Configuration file for the target S3DataStore").withRequiredArg()
                 .ofType(String.class);
+        op.accepts(SRC_MBS, "Use a read-only blob store for the source (for repositories with missing binaries)");
     }
 
     private static void addRdbOptions(OptionParser op) {
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java
index 71e61d0..ade2b91 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java
@@ -27,6 +27,7 @@ import org.apache.jackrabbit.oak.upgrade.cli.blob.BlobStoreFactory;
 import org.apache.jackrabbit.oak.upgrade.cli.blob.DummyBlobStoreFactory;
 import org.apache.jackrabbit.oak.upgrade.cli.blob.FileBlobStoreFactory;
 import org.apache.jackrabbit.oak.upgrade.cli.blob.FileDataStoreFactory;
+import org.apache.jackrabbit.oak.upgrade.cli.blob.MissingBlobStoreFactory;
 import org.apache.jackrabbit.oak.upgrade.cli.blob.S3DataStoreFactory;
 import org.apache.jackrabbit.oak.upgrade.cli.node.StoreFactory;
 import org.slf4j.Logger;
@@ -34,6 +35,7 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SRC_FBS;
 import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SRC_FDS;
+import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SRC_MBS;
 import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SRC_S3;
 import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.SRC_S3_CONFIG;
 import static org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory.DST_FBS;
@@ -96,6 +98,8 @@ public class StoreArguments {
             factory = new S3DataStoreFactory(parser.getOption(SRC_S3_CONFIG), parser.getOption(SRC_S3));
         } else if (parser.hasOption(SRC_FDS)) {
             factory = new FileDataStoreFactory(parser.getOption(SRC_FDS));
+        } else if (parser.hasOption(SRC_MBS)) {
+            factory = new MissingBlobStoreFactory();
         } else {
             factory = new DummyBlobStoreFactory();
         }
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreTest.java
new file mode 100644
index 0000000..baf11e5
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/MissingBlobStoreTest.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.FileBlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.FileDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.MongoNodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.S3DataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentNodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+@RunWith(Parameterized.class)
+public class MissingBlobStoreTest extends AbstractOak2OakTest {
+
+    private static final Logger log = LoggerFactory.getLogger(MissingBlobStoreTest.class);
+
+    @Parameterized.Parameters(name = "{0}")
+    public static Collection<Object[]> data() {
+        List<Object[]> params = new ArrayList<>();
+        BlobStoreContainer blob;
+
+        blob = new FileDataStoreContainer();
+        params.add(new Object[] { "Segment -> Segment (FDS)", new SegmentNodeStoreContainer(blob), new SegmentNodeStoreContainer(blob), true });
+        params.add(new Object[] { "Segment -> SegmentTar (FDS)", new SegmentNodeStoreContainer(blob), new SegmentTarNodeStoreContainer(blob), true });
+        params.add(new Object[] { "SegmentTar -> Segment (FDS)", new SegmentTarNodeStoreContainer(blob), new SegmentNodeStoreContainer(blob), true });
+        params.add(new Object[] { "SegmentTar -> SegmentTar (FDS)", new SegmentTarNodeStoreContainer(blob), new SegmentTarNodeStoreContainer(blob), true });
+        try {
+            params.add(new Object[] { "Mongo -> Mongo (FDS)", new MongoNodeStoreContainer(blob), new MongoNodeStoreContainer(blob), false });
+            // params.add(new Object[] { "Segment -> Mongo (FDS)", new SegmentNodeStoreContainer(blob), new MongoNodeStoreContainer(blob), false });
+            // params.add(new Object[] { "SegmentTar -> Mongo (FDS)", new SegmentTarNodeStoreContainer(blob), new MongoNodeStoreContainer(blob), false });
+            params.add(new Object[] { "Mongo -> Segment (FDS)", new MongoNodeStoreContainer(blob), new SegmentNodeStoreContainer(blob), false });
+        } catch (IOException e) {
+            log.error("Can't create Mongo -> Mongo case", e);
+        }
+
+        // blob = new FileBlobStoreContainer();
+        // params.add(new Object[] { "SegmentTar -> SegmentTar (FBS)", new SegmentTarNodeStoreContainer(blob), new SegmentTarNodeStoreContainer(blob), true });
+        return params;
+    }
+
+    private final NodeStoreContainer source;
+
+    private final NodeStoreContainer destination;
+
+    private final boolean supportsCheckpoint;
+
+    public MissingBlobStoreTest(String name, NodeStoreContainer source, NodeStoreContainer destination, boolean supportsCheckpoint) {
+        this.source = source;
+        this.destination = destination;
+        this.supportsCheckpoint = supportsCheckpoint;
+    }
+
+    @Override
+    protected NodeStoreContainer getSourceContainer() {
+        return source;
+    }
+
+    @Override
+    protected NodeStoreContainer getDestinationContainer() {
+        return destination;
+    }
+
+    @Override
+    protected String[] getArgs() {
+        return new String[] { "--src-missingblobstore", source.getDescription(), destination.getDescription() };
+    }
+
+    @Override
+    protected boolean supportsCheckpointMigration() {
+        return supportsCheckpoint;
+    }
+}