diff --git a/oak-doc/src/site/markdown/migration.md b/oak-doc/src/site/markdown/migration.md
index db59d98..1089e58 100644
--- a/oak-doc/src/site/markdown/migration.md
+++ b/oak-doc/src/site/markdown/migration.md
@@ -215,11 +215,13 @@ When migrating an old SegmentMK repository (pre-Oak 1.6) to the new SegmentMK (O
 * custom include-, exclude- or merge- paths are specified or
 * the binaries are copied by references, no source datastore is specified and two different checkpoints contains different binary under the same path.
 
-In the second case oak-upgrade emits following warning:
+In the second case, oak-upgrade emits the following warning and aborts the migration:
 
-    Checkpoints won't be copied, because no external datastore has been specified. This will result in the full repository reindexing on the first start.
+    Checkpoints won't be copied, because no external datastore has been specified. This will result in the full repository reindexing on the first start. Use --skip-checkpoints to force the migration or see https://jackrabbit.apache.org/oak/docs/migration.html#Checkpoints_migration for more info.
 
-The easiest way to fix this issue is specifying the source datastore in the command line options (eg. `--src-datastore` or `--src-s3datastore`). The warning may also be ignored, but in this case the repository will be fully reindexing on the first startup, which may be a long process, especially for large instances. Repository won't be usable until the reindexing process is done.
+The easiest way to fix this issue is to specify the source datastore in the command line options (e.g. `--src-datastore` or `--src-s3datastore`).
+
+The warning may also be ignored, but in this case the repository will be fully reindexed on the first startup. This may be a long process, especially for large instances. The repository won't be usable until the reindexing is done. Use the `--skip-checkpoints` option to suppress the warning and proceed with the migration.
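+
+For example, the two options might be used as follows (all paths are illustrative):
+
+    java -jar oak-upgrade-*.jar --src-datastore=/path/to/datastore \
+        /path/to/old/segmentstore /path/to/new/segmentstore
+
+    java -jar oak-upgrade-*.jar --skip-checkpoints \
+        /path/to/old/segmentstore /path/to/new/segmentstore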
 
 ## Online blob migration with SplitBlobStore
 
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java
index 16d7103..cb1bc69 100755
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java
@@ -103,6 +103,8 @@ public class RepositorySidegrade {
      */
     private Set<String> mergePaths = DEFAULT_MERGE_PATHS;
 
+    private boolean skipCheckpoints = false;
+
     private boolean includeIndex = false;
 
     private boolean filterLongNames = true;
@@ -233,6 +235,10 @@ public class RepositorySidegrade {
         this.onlyVerify = onlyVerify;
     }
 
+    public void setSkipCheckpoints(boolean skipCheckpoints) {
+        this.skipCheckpoints = skipCheckpoints;
+    }
+
     /**
      * Same as {@link #copy(RepositoryInitializer)}, but with no custom initializer.
      *
@@ -304,14 +310,16 @@ public class RepositorySidegrade {
         if (!isCompleteMigration()) {
             LOG.info("Custom paths have been specified, checkpoints won't be migrated");
             isRemoveCheckpointReferences = true;
+        } else if (skipCheckpoints) {
+            LOG.info("Checkpoints won't be migrated because of the --skip-checkpoints option");
+            isRemoveCheckpointReferences = true;
         } else {
             boolean checkpointsCopied;
             try {
                 checkpointsCopied = copyCheckpoints(targetRoot);
             } catch(UnsupportedOperationException e) {
                 removeCheckpoints();
-                checkpointsCopied = false;
-                LOG.warn("Checkpoints won't be copied, because no external datastore has been specified. This will result in the full repository reindexing on the first start. See https://jackrabbit.apache.org/oak/docs/migration.html#Checkpoints_migration for more info.");
+                throw new RepositoryException("Checkpoints won't be copied, because no external datastore has been specified. This will result in the full repository reindexing on the first start. Use --skip-checkpoints to force the migration or see https://jackrabbit.apache.org/oak/docs/migration.html#Checkpoints_migration for more info.");
             }
             if (!checkpointsCopied) {
                 LOG.info("Copying checkpoints is not supported for this combination of node stores");
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
index b904711..ebca719 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/MigrationFactory.java
@@ -117,6 +117,7 @@ public class MigrationFactory {
         sidegrade.setIncludeIndex(options.isIncludeIndex());
         sidegrade.setVerify(options.isVerify());
         sidegrade.setOnlyVerify(options.isOnlyVerify());
+        sidegrade.setSkipCheckpoints(options.isSkipCheckpoints());
         return sidegrade;
     }
 
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
index 29a899d..7759388 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
@@ -64,6 +64,8 @@ public class MigrationOptions {
 
     private final boolean onlyVerify;
 
+    private final boolean skipCheckpoints;
+
     private final String srcUser;
 
     private final String srcPassword;
@@ -122,6 +124,7 @@ public class MigrationOptions {
         this.ignoreMissingBinaries = args.hasOption(OptionParserFactory.IGNORE_MISSING_BINARIES);
         this.verify = args.hasOption(OptionParserFactory.VERIFY);
         this.onlyVerify = args.hasOption(OptionParserFactory.ONLY_VERIFY);
+        this.skipCheckpoints = args.hasOption(OptionParserFactory.SKIP_CHECKPOINTS);
 
         this.srcUser = args.getOption(OptionParserFactory.SRC_USER);
         this.srcPassword = args.getOption(OptionParserFactory.SRC_USER);
@@ -210,6 +213,10 @@ public class MigrationOptions {
         return onlyVerify;
     }
 
+    public boolean isSkipCheckpoints() {
+        return skipCheckpoints;
+    }
+
     public String getSrcUser() {
         return srcUser;
     }
@@ -343,6 +350,10 @@ public class MigrationOptions {
             log.info("Source DataStore external blobs: {}", srcExternalBlobs);
         }
 
+        if (skipCheckpoints) {
+            log.info("Checkpoints won't be migrated");
+        }
+
         log.info("Cache size: {} MB", cacheSizeInMB);
     }
 
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
index 35bdda3..24210ed 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
@@ -82,6 +82,8 @@ public class OptionParserFactory {
 
     public static final String ONLY_VERIFY = "only-verify";
 
+    public static final String SKIP_CHECKPOINTS = "skip-checkpoints";
+
     public static OptionParser create() {
         OptionParser op = new OptionParser();
         addUsageOptions(op);
@@ -153,5 +155,6 @@ public class OptionParserFactory {
         op.accepts(SKIP_NAME_CHECK, "Skip the initial phase of testing node name lengths");
         op.accepts(VERIFY, "After the sidegrade check whether the source repository is exactly the same as destination");
         op.accepts(ONLY_VERIFY, "Performs only --" + VERIFY + ", without copying content");
+        op.accepts(SKIP_CHECKPOINTS, "Don't copy checkpoints on the full segment->segment migration");
     }
 }
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/CopyCheckpointsTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/CopyCheckpointsTest.java
new file mode 100644
index 0000000..ba3dbc4
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/CopyCheckpointsTest.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade;
+
+import com.google.common.base.Joiner;
+import org.apache.jackrabbit.oak.spi.state.NodeStore;
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.OakUpgrade;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.FileDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentNodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.CliArgumentException;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.DatastoreArguments;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.MigrationCliArguments;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.MigrationOptions;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.OptionParserFactory;
+import org.apache.jackrabbit.oak.upgrade.cli.parser.StoreArguments;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.jcr.RepositoryException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import static java.util.Arrays.asList;
+import static org.junit.Assert.fail;
+
+@RunWith(Parameterized.class)
+public class CopyCheckpointsTest extends AbstractOak2OakTest {
+
+    private enum Result {
+        EXCEPTION, CHECKPOINTS_MISSING, CHECKPOINTS_COPIED
+    }
+
+    private static final Logger log = LoggerFactory.getLogger(CopyCheckpointsTest.class);
+
+    @Parameterized.Parameters(name = "{0}")
+    public static Collection<Object[]> data() throws IOException {
+        List<Object[]> params = new ArrayList<Object[]>();
+
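+        // Each case: test name, source store, destination store, extra command line arguments, expected outcome.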
+        BlobStoreContainer blob = new FileDataStoreContainer();
+        params.add(new Object[]{
+                "Fails on missing blobstore",
+                new SegmentNodeStoreContainer(blob),
+                new SegmentNodeStoreContainer(blob),
+                asList(),
+                Result.EXCEPTION
+        });
+        params.add(new Object[]{
+                "Suppress the warning",
+                new SegmentNodeStoreContainer(blob),
+                new SegmentNodeStoreContainer(blob),
+                asList("--skip-checkpoints"),
+                Result.CHECKPOINTS_MISSING
+        });
+        params.add(new Object[]{
+                "Source data store defined, checkpoints migrated",
+                new SegmentTarNodeStoreContainer(blob),
+                new SegmentTarNodeStoreContainer(blob),
+                asList("--src-datastore=" + blob.getDescription()),
+                Result.CHECKPOINTS_COPIED
+        });
+        return params;
+    }
+
+    private final NodeStoreContainer source;
+
+    private final NodeStoreContainer destination;
+
+    private final List<String> args;
+
+    private final Result expectedResult;
+
+    public CopyCheckpointsTest(String name, NodeStoreContainer source, NodeStoreContainer destination, List<String> args, Result expectedResult) throws IOException, CliArgumentException {
+        this.source = source;
+        this.destination = destination;
+        this.args = args;
+        this.expectedResult = expectedResult;
+
+        this.source.clean();
+        this.destination.clean();
+    }
+
+    @Override
+    protected NodeStoreContainer getSourceContainer() {
+        return source;
+    }
+
+    @Override
+    protected NodeStoreContainer getDestinationContainer() {
+        return destination;
+    }
+
+    @Override
+    protected String[] getArgs() {
+        List<String> result = new ArrayList<>(args);
+        result.addAll(asList("--disable-mmap", source.getDescription(), destination.getDescription()));
+        return result.toArray(new String[result.size()]);
+    }
+
+    @Before
+    @Override
+    public void prepare() throws Exception {
+        NodeStore source = getSourceContainer().open();
+        try {
+            initContent(source);
+        } finally {
+            getSourceContainer().close();
+        }
+
+        String[] args = getArgs();
+        log.info("oak2oak {}", Joiner.on(' ').join(args));
+        try {
+            MigrationCliArguments cliArgs = new MigrationCliArguments(OptionParserFactory.create().parse(args));
+            MigrationOptions options = new MigrationOptions(cliArgs);
+            StoreArguments stores = new StoreArguments(options, cliArgs.getArguments());
+            DatastoreArguments datastores = new DatastoreArguments(options, stores, stores.srcUsesEmbeddedDatastore());
+            OakUpgrade.migrate(options, stores, datastores);
+        } catch(RuntimeException e) {
+            if (expectedResult == Result.EXCEPTION) {
+                return;
+            } else {
+                throw e;
+            }
+        }
+        if (expectedResult == Result.EXCEPTION) {
+            fail("Migration should fail");
+        }
+        createSession();
+    }
+
+    @Test
+    @Override
+    public void validateMigration() throws RepositoryException, IOException, CliArgumentException {
+        switch (expectedResult) {
+            case CHECKPOINTS_COPIED:
+                verifyCheckpoint();
+                break;
+
+            case CHECKPOINTS_MISSING:
+                verifyEmptyAsync();
+                break;
+        }
+    }
+}
\ No newline at end of file
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/AbstractOak2OakTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/AbstractOak2OakTest.java
index 2e9839a..b7a7381 100644
--- a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/AbstractOak2OakTest.java
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/AbstractOak2OakTest.java
@@ -216,7 +216,7 @@ public abstract class AbstractOak2OakTest {
         }
     }
 
-    private void verifyCheckpoint() {
+    protected void verifyCheckpoint() {
         assertEquals("after", destination.getRoot().getString("checkpoint-state"));
         Map<String, String> info =
                destination.checkpointInfo(checkpointReference);
@@ -250,7 +250,7 @@ public abstract class AbstractOak2OakTest {
     }
 
     // OAK-2869
-    private void verifyEmptyAsync() {
+    protected void verifyEmptyAsync() {
         NodeState state = destination.getRoot().getChildNode(":async");
         assertFalse(state.hasProperty("test"));
     }
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/CopyBinariesTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/CopyBinariesTest.java
index c8be9b8..cd846c5b 100644
--- a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/CopyBinariesTest.java
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/CopyBinariesTest.java
@@ -65,88 +65,77 @@ public class CopyBinariesTest extends AbstractOak2OakTest {
                 new SegmentNodeStoreContainer(blob),
                 new SegmentNodeStoreContainer(blob),
                 asList(),
-                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES,
-                false
+                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES
         });
         params.add(new Object[]{
                 "Copy references, no blobstores defined, segment-tar -> segment-tar",
                 new SegmentTarNodeStoreContainer(blob),
                 new SegmentTarNodeStoreContainer(blob),
                 asList(),
-                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES,
-                false
+                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES
         });
         params.add(new Object[]{
                 "Copy references, no blobstores defined, segment -> segment-tar",
                 new SegmentNodeStoreContainer(blob),
                 new SegmentTarNodeStoreContainer(blob),
                 asList(),
-                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES,
-                false
+                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES
         });
         params.add(new Object[]{
                 "Copy references, no blobstores defined, document -> segment-tar",
                 new JdbcNodeStoreContainer(blob),
                 new SegmentNodeStoreContainer(blob),
                 asList("--src-user=sa", "--src-password=sa"),
-                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES,
-                false
+                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES
         });
         params.add(new Object[]{
                 "Copy references, no blobstores defined, segment-tar -> document",
                 new SegmentTarNodeStoreContainer(blob),
                 new JdbcNodeStoreContainer(blob),
                 asList("--user=sa", "--password=sa"),
-                DatastoreArguments.BlobMigrationCase.UNSUPPORTED,
-                false
+                DatastoreArguments.BlobMigrationCase.UNSUPPORTED
         });
         params.add(new Object[]{
                 "Missing source, external destination",
                 new SegmentTarNodeStoreContainer(blob),
                 new SegmentTarNodeStoreContainer(blob),
                 asList("--datastore=" + blob.getDescription()),
-                DatastoreArguments.BlobMigrationCase.UNSUPPORTED,
-                false
+                DatastoreArguments.BlobMigrationCase.UNSUPPORTED
         });
         params.add(new Object[]{
                 "Copy embedded to embedded, no blobstores defined",
                 new SegmentTarNodeStoreContainer(),
                 new SegmentTarNodeStoreContainer(),
                 asList(),
-                DatastoreArguments.BlobMigrationCase.EMBEDDED_TO_EMBEDDED,
-                true
+                DatastoreArguments.BlobMigrationCase.EMBEDDED_TO_EMBEDDED
         });
         params.add(new Object[]{
                 "Copy embedded to external, no blobstores defined",
                 new SegmentTarNodeStoreContainer(),
                 new SegmentTarNodeStoreContainer(blob),
                 asList("--datastore=" + blob.getDescription()),
-                DatastoreArguments.BlobMigrationCase.EMBEDDED_TO_EXTERNAL,
-                true
+                DatastoreArguments.BlobMigrationCase.EMBEDDED_TO_EXTERNAL
         });
         params.add(new Object[]{
                 "Copy references, src blobstore defined",
                 new SegmentTarNodeStoreContainer(blob),
                 new SegmentTarNodeStoreContainer(blob),
                 asList("--src-datastore=" + blob.getDescription()),
-                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES,
-                true
+                DatastoreArguments.BlobMigrationCase.COPY_REFERENCES
         });
         params.add(new Object[]{
"Copy external to embedded, src blobstore defined", new SegmentTarNodeStoreContainer(blob), new SegmentTarNodeStoreContainer(), asList("--copy-binaries", "--src-datastore=" + blob.getDescription()), - DatastoreArguments.BlobMigrationCase.EXTERNAL_TO_EMBEDDED, - true + DatastoreArguments.BlobMigrationCase.EXTERNAL_TO_EMBEDDED }); params.add(new Object[]{ "Copy external to external, src blobstore defined", new SegmentTarNodeStoreContainer(blob), new SegmentTarNodeStoreContainer(blob2), asList("--copy-binaries", "--src-datastore=" + blob.getDescription(), "--datastore=" + blob2.getDescription()), - DatastoreArguments.BlobMigrationCase.EXTERNAL_TO_EXTERNAL, - true + DatastoreArguments.BlobMigrationCase.EXTERNAL_TO_EXTERNAL }); return params; } @@ -159,14 +148,11 @@ public class CopyBinariesTest extends AbstractOak2OakTest { private final DatastoreArguments.BlobMigrationCase blobMigrationCase; - private final boolean supportsCheckpointMigration; - - public CopyBinariesTest(String name, NodeStoreContainer source, NodeStoreContainer destination, List args, DatastoreArguments.BlobMigrationCase blobMigrationCase, boolean supportsCheckpointMigration) throws IOException, CliArgumentException { + public CopyBinariesTest(String name, NodeStoreContainer source, NodeStoreContainer destination, List args, DatastoreArguments.BlobMigrationCase blobMigrationCase) throws IOException, CliArgumentException { this.source = source; this.destination = destination; this.args = args; this.blobMigrationCase = blobMigrationCase; - this.supportsCheckpointMigration = supportsCheckpointMigration; this.source.clean(); this.destination.clean(); @@ -226,9 +212,4 @@ public class CopyBinariesTest extends AbstractOak2OakTest { } super.validateMigration(); } - - @Override - protected boolean supportsCheckpointMigration() { - return supportsCheckpointMigration; - } } \ No newline at end of file diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentNodeStoreContainer.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentNodeStoreContainer.java index 05bb008..98b30db 100644 --- a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentNodeStoreContainer.java +++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentNodeStoreContainer.java @@ -72,7 +72,10 @@ public class SegmentNodeStoreContainer implements NodeStoreContainer { @Override public void close() { - fs.close(); + if (fs != null) { + fs.close(); + fs = null; + } } @Override