diff --git a/oak-upgrade/pom.xml b/oak-upgrade/pom.xml
index f5a0ca116b..862a243637 100644
--- a/oak-upgrade/pom.xml
+++ b/oak-upgrade/pom.xml
@@ -86,6 +86,11 @@
      <artifactId>oak-blob-cloud</artifactId>
      <version>${project.version}</version>
    </dependency>
+    <dependency>
+      <groupId>org.apache.jackrabbit</groupId>
+      <artifactId>oak-blob-cloud-azure</artifactId>
+      <version>${project.version}</version>
+    </dependency>
    <dependency>
      <groupId>org.apache.jackrabbit</groupId>
      <artifactId>oak-core-spi</artifactId>
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactory.java
new file mode 100644
index 0000000000..e0b33d0a20
--- /dev/null
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactory.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage.AzureDataStore;
+import org.apache.jackrabbit.oak.commons.PropertiesUtil;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.stats.DefaultStatisticsProvider;
+import org.apache.jackrabbit.oak.stats.StatisticsProvider;
+
+import com.google.common.collect.Maps;
+import com.google.common.io.Closer;
+import com.google.common.io.Files;
+
+public class AzureDataStoreFactory implements BlobStoreFactory {
+
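+ // Matches values exported in OSGi .config syntax, where a value may carry a single type
+ // prefix and surrounding quotes (e.g. I"123"); group 1 captures the raw value.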
+ private static final Pattern STRIP_VALUE_PATTERN = Pattern.compile("^[TILFDXSCB]?\"(.*)\"\\W*$");
+
+ private final Properties props;
+
+ private final String directory;
+
+ private final File tempHomeDir;
+
+ private final boolean ignoreMissingBlobs;
+
+ public AzureDataStoreFactory(String configuration, String directory, boolean ignoreMissingBlobs) throws IOException {
+ this.props = new Properties();
+ FileReader reader = new FileReader(new File(configuration));
+ try {
+ props.load(reader);
+ } finally {
+ IOUtils.closeQuietly(reader);
+ }
+
+ // Default directory
+
+ this.directory = directory;
+ this.tempHomeDir = Files.createTempDir();
+ this.ignoreMissingBlobs = ignoreMissingBlobs;
+ }
+
+ @Override
+ public BlobStore create(Closer closer) throws IOException {
+ AzureDataStore delegate = createDS(directory, props);
+ // Initialize a default stats provider
+ final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
+ StatisticsProvider statsProvider = new DefaultStatisticsProvider(executor);
+ delegate.setStatisticsProvider(statsProvider);
+ // Reduce staging purge interval to 60 seconds
+ delegate.setStagingPurgeInterval(60);
+
+ try {
+ delegate.init(tempHomeDir.getPath());
+ } catch (RepositoryException e) {
+ throw new IOException(e);
+ }
+ closer.register(asCloseable(delegate));
+ closer.register(new Closeable() {
+ @Override
+ public void close() throws IOException {
+ executor.shutdown();
+ try {
+ if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
+ throw new IOException("Can't shut down the executor");
+ }
+ } catch (InterruptedException e) {
+ throw new IOException(e);
+ }
+ }
+ });
+ if (ignoreMissingBlobs) {
+ return new SafeDataStoreBlobStore(delegate);
+ } else {
+ return new DataStoreBlobStore(delegate);
+ }
+ }
+
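+ // Builds an AzureDataStore from the stripped properties; the caller is responsible for
+ // calling init() on it (see create() above).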
+ static AzureDataStore createDS(String directory, Properties props) {
+ Properties strippedProperties = new Properties();
+ Map<String, Object> map = Maps.newHashMap();
+
+ // Default path value as per OAK-6632: fall back to the system temp dir when neither
+ // the command-line directory nor a "path" property is given
+ if (StringUtils.isEmpty(directory) && !props.containsKey("path")) {
+ directory = System.getProperty("java.io.tmpdir");
+ }
+
+ for (Object key : new HashSet<>(props.keySet())) {
+ String strippedValue = stripValue(props.getProperty((String) key));
+
+ strippedProperties.put(key, strippedValue);
+ map.put((String) key, strippedValue);
+ }
+
+ AzureDataStore ds = new AzureDataStore();
+ ds.setProperties(strippedProperties);
+ ds.setPath(directory);
+
+ PropertiesUtil.populate(ds, map, false);
+ return ds;
+ }
+
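+ // Wraps the store in a Closeable that first waits for the second cache-stats entry
+ // (the upload staging cache) to report no pending elements, so that asynchronous
+ // uploads can finish before the store is closed.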
+ private static Closeable asCloseable(final AzureDataStore store) {
+ return new Closeable() {
+ @Override
+ public void close() throws IOException {
+ try {
+ while (store.getStats().get(1).getElementCount() > 0) {
+ Thread.sleep(100);
+ }
+ } catch (InterruptedException e) {
+ throw new IOException(e);
+ } finally {
+ try {
+ store.close();
+ } catch (DataStoreException e) {
+ throw new IOException(e);
+ }
+ }
+ }
+ };
+ }
+
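+ // Strips the OSGi type prefix and quotes from a single value, e.g. I"123" -> 123;
+ // values that don't match the pattern are returned unchanged.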
+ static String stripValue(String value) {
+ Matcher matcher = STRIP_VALUE_PATTERN.matcher(value);
+ if (matcher.matches()) {
+ return matcher.group(1);
+ } else {
+ return value;
+ }
+ }
+
+ @Override
+ public String toString() {
+ return String.format("AzureDataStore[%s]", directory);
+ }
+}
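Note: the configuration file consumed by this factory (passed via --src-azureconfig / --azureconfig, see the OptionParserFactory changes below) is a plain Java properties file. A minimal sketch follows, assuming the key names defined by oak-blob-cloud-azure's AzureConstants; only cacheSize and path are exercised by the tests in this patch, so treat the Azure-specific keys as illustrative:

    # connection to the Azure Blob Storage account (key names assumed, see AzureConstants)
    azureConnectionString=DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...
    container=oak-blobs
    # local cache settings inherited from AbstractSharedCachingDataStore
    cacheSize=68719476736
    path=/path/to/local/cache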
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java
index 181bbd7491..221ab1f5d5 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/DatastoreArguments.java
@@ -25,6 +25,7 @@ import org.apache.jackrabbit.oak.upgrade.cli.blob.FileBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.FileDataStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.LoopbackBlobStoreFactory;
import org.apache.jackrabbit.oak.upgrade.cli.blob.S3DataStoreFactory;
+import org.apache.jackrabbit.oak.upgrade.cli.blob.AzureDataStoreFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -140,7 +141,9 @@ public class DatastoreArguments {
return new FileBlobStoreFactory(options.getSrcFbs());
} else if (options.isSrcS3()) {
return new S3DataStoreFactory(options.getSrcS3Config(), options.getSrcS3(), ignoreMissingBinaries);
- } else if (options.isSrcFds()) {
+ } else if (options.isSrcAzure()) {
+ return new AzureDataStoreFactory(options.getSrcAzureConfig(), options.getSrcAzure(), ignoreMissingBinaries);
+ } else if (options.isSrcFds()) {
return new FileDataStoreFactory(options.getSrcFds(), ignoreMissingBinaries);
} else {
return null;
@@ -152,6 +155,8 @@ public class DatastoreArguments {
return new FileBlobStoreFactory(options.getDstFbs());
} else if (options.isDstS3()) {
return new S3DataStoreFactory(options.getDstS3Config(), options.getDstS3(), false);
+ } else if (options.isDstAzure()) {
+ return new AzureDataStoreFactory(options.getDstAzureConfig(), options.getDstAzure(), false);
} else if (options.isDstFds()) {
return new FileDataStoreFactory(options.getDstFds(), false);
} else {
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
index c7c3ae8468..b344cb402d 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/MigrationOptions.java
@@ -83,6 +83,10 @@ public class MigrationOptions {
private final String srcS3Config;
private final String srcS3;
+
+ private final String srcAzureConfig;
+
+ private final String srcAzure;
private final String dstFbs;
@@ -91,6 +95,10 @@ public class MigrationOptions {
private final String dstS3Config;
private final String dstS3;
+
+ private final String dstAzureConfig;
+
+ private final String dstAzure;
private final Boolean srcExternalBlobs;
@@ -139,11 +147,15 @@ public class MigrationOptions {
this.srcFds = args.getOption(OptionParserFactory.SRC_FDS);
this.srcS3 = args.getOption(OptionParserFactory.SRC_S3);
this.srcS3Config = args.getOption(OptionParserFactory.SRC_S3_CONFIG);
+ this.srcAzure = args.getOption(OptionParserFactory.SRC_AZURE);
+ this.srcAzureConfig = args.getOption(OptionParserFactory.SRC_AZURE_CONFIG);
this.dstFbs = args.getOption(OptionParserFactory.DST_FBS);
this.dstFds = args.getOption(OptionParserFactory.DST_FDS);
this.dstS3 = args.getOption(OptionParserFactory.DST_S3);
this.dstS3Config = args.getOption(OptionParserFactory.DST_S3_CONFIG);
+ this.dstAzure = args.getOption(OptionParserFactory.DST_AZURE);
+ this.dstAzureConfig = args.getOption(OptionParserFactory.DST_AZURE_CONFIG);
if (args.hasOption(OptionParserFactory.SRC_EXTERNAL_BLOBS)) {
this.srcExternalBlobs = args.getBooleanOption(OptionParserFactory.SRC_EXTERNAL_BLOBS);
@@ -259,6 +271,14 @@ public class MigrationOptions {
public String getSrcS3() {
return srcS3;
}
+
+ public String getSrcAzureConfig() {
+ return srcAzureConfig;
+ }
+
+ public String getSrcAzure() {
+ return srcAzure;
+ }
public String getDstFbs() {
return dstFbs;
@@ -275,6 +295,14 @@ public class MigrationOptions {
public String getDstS3() {
return dstS3;
}
+
+ public String getDstAzureConfig() {
+ return dstAzureConfig;
+ }
+
+ public String getDstAzure() {
+ return dstAzure;
+ }
public boolean isSrcFds() {
return StringUtils.isNotBlank(srcFds);
@@ -287,6 +315,12 @@ public class MigrationOptions {
public boolean isSrcS3() {
return StringUtils.isNotBlank(srcS3) && StringUtils.isNotBlank(srcS3Config);
}
+
+ public boolean isSrcAzure() {
+ // OAK-6632 - only Azure config should be required (path not needed)
+ return StringUtils.isNotBlank(srcAzureConfig);
+ }
public boolean isDstFds() {
return StringUtils.isNotBlank(dstFds);
@@ -299,13 +333,18 @@ public class MigrationOptions {
public boolean isDstS3() {
return StringUtils.isNotBlank(dstS3) && StringUtils.isNotBlank(dstS3Config);
}
+
+ public boolean isDstAzure() {
+ // OAK-6632 - only Azure config should be required (path not needed)
+ return StringUtils.isNotBlank(dstAzureConfig);
+ }
public boolean isSrcBlobStoreDefined() {
- return isSrcFbs() || isSrcFds() || isSrcS3();
+ return isSrcFbs() || isSrcFds() || isSrcS3() || isSrcAzure();
}
public boolean isDstBlobStoreDefined() {
- return isDstFbs() || isDstFds() || isDstS3();
+ return isDstFbs() || isDstFds() || isDstS3() || isDstAzure();
}
public void logOptions() {
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
index 0683804bf0..6f9bbca752 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/OptionParserFactory.java
@@ -51,12 +51,20 @@ public class OptionParserFactory {
public static final String SRC_S3 = "src-s3datastore";
public static final String SRC_S3_CONFIG = "src-s3config";
+
+ public static final String SRC_AZURE = "src-azuredatastore";
+
+ public static final String SRC_AZURE_CONFIG = "src-azureconfig";
public static final String SRC_EXTERNAL_BLOBS = "src-external-ds";
public static final String DST_FDS = "datastore";
public static final String DST_FBS = "fileblobstore";
+
+ public static final String DST_AZURE = "azuredatastore";
+
+ public static final String DST_AZURE_CONFIG = "azureconfig";
public static final String DST_S3 = "s3datastore";
@@ -110,6 +118,9 @@ public class OptionParserFactory {
op.accepts(SRC_S3, "Datastore directory to be used for the source S3").withRequiredArg().ofType(String.class);
op.accepts(SRC_S3_CONFIG, "Configuration file for the source S3DataStore").withRequiredArg()
.ofType(String.class);
+ op.accepts(SRC_AZURE, "Datastore directory to be used for the source Azure").withRequiredArg().ofType(String.class);
+ op.accepts(SRC_AZURE_CONFIG, "Configuration file for the source AzureDataStore").withRequiredArg()
+ .ofType(String.class);
op.accepts(DST_FDS, "Datastore directory to be used as a target FileDataStore").withRequiredArg()
.ofType(String.class);
op.accepts(DST_FBS, "Datastore directory to be used as a target FileBlobStore").withRequiredArg()
@@ -117,6 +128,9 @@ public class OptionParserFactory {
op.accepts(DST_S3, "Datastore directory to be used for the target S3").withRequiredArg().ofType(String.class);
op.accepts(DST_S3_CONFIG, "Configuration file for the target S3DataStore").withRequiredArg()
.ofType(String.class);
+ op.accepts(DST_AZURE, "Datastore directory to be used for the target Azure").withRequiredArg().ofType(String.class);
+ op.accepts(DST_AZURE_CONFIG, "Configuration file for the target AzureDataStore").withRequiredArg()
+ .ofType(String.class);
op.accepts(IGNORE_MISSING_BINARIES, "Don't break the migration if some binaries are missing");
op.accepts(SRC_EXTERNAL_BLOBS, "Flag specifying if the source Store has external references or not")
.withRequiredArg().ofType(Boolean.class);
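For reference, once these options are registered a binary migration out of an Azure data store can be invoked the same way the new tests below drive it (jar name and paths are placeholders; per OAK-6632 only --src-azureconfig is required on the source side):

    java -jar oak-upgrade-*.jar --copy-binaries \
        --src-azureconfig azure.properties \
        --fileblobstore /path/to/new/blobstore \
        /path/to/source/segmentstore /path/to/target/segmentstore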
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactoryTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactoryTest.java
new file mode 100644
index 0000000000..5b2bd3b26c
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureDataStoreFactoryTest.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage.AzureDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.AbstractSharedCachingDataStore;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+
+public class AzureDataStoreFactoryTest {
+
+ @Test
+ public void testPopulateProperties() throws NoSuchFieldException, IllegalAccessException {
+ Properties props = new Properties();
+ props.setProperty("cacheSize", "123");
+
+ AzureDataStore ds = AzureDataStoreFactory.createDS("/tmp", props);
+ assertEquals(123, readLong("cacheSize", AbstractSharedCachingDataStore.class, ds));
+ assertEquals("/tmp", readString("path", AbstractSharedCachingDataStore.class, ds));
+ }
+
+ @Test
+ public void testPopulatePropertiesNoPath() throws NoSuchFieldException, IllegalAccessException {
+ Properties props = new Properties();
+ props.setProperty("cacheSize", "123");
+
+ String tempDir = System.getProperty("java.io.tmpdir");
+
+ AzureDataStore ds = AzureDataStoreFactory.createDS(null, props);
+ assertEquals(123, readLong("cacheSize", AbstractSharedCachingDataStore.class, ds));
+ assertEquals(tempDir, readString("path", AbstractSharedCachingDataStore.class, ds));
+ }
+
+ @Test
+ public void testPopulatePropertiesPathFromConfig() throws NoSuchFieldException, IllegalAccessException {
+ Properties props = new Properties();
+ props.setProperty("cacheSize", "123");
+ props.setProperty("path", "/tmp");
+
+ AzureDataStore ds = AzureDataStoreFactory.createDS(null, props);
+ assertEquals(123, readLong("cacheSize", AbstractSharedCachingDataStore.class, ds));
+ assertEquals("/tmp", readString("path", AbstractSharedCachingDataStore.class, ds));
+ }
+
+
+ @Test
+ public void testStripOsgiPrefix() throws NoSuchFieldException, IllegalAccessException {
+ Properties props = new Properties();
+ props.setProperty("cacheSize", "I\"123\"");
+
+ AzureDataStore ds = AzureDataStoreFactory.createDS("xyz", props);
+ assertEquals(123, readLong("cacheSize", AbstractSharedCachingDataStore.class, ds));
+ }
+
+ private static long readLong(String fieldName, Class<?> clazz, Object object) throws NoSuchFieldException, IllegalAccessException {
+ Field field = clazz.getDeclaredField(fieldName);
+ field.setAccessible(true);
+ return field.getLong(object);
+ }
+
+ private static String readString(String fieldName, Class<?> clazz, Object object) throws NoSuchFieldException, IllegalAccessException {
+ Field field = clazz.getDeclaredField(fieldName);
+ field.setAccessible(true);
+ return (String) field.get(object);
+ }
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToFbsTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToFbsTest.java
new file mode 100644
index 0000000000..79936749a7
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToFbsTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.FileBlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.AzureDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+
+public class AzureToFbsTest extends AbstractOak2OakTest {
+ private static final String AZURE_PROPERTIES = System.getProperty("azure.properties");
+
+ private final BlobStoreContainer sourceBlob;
+
+ private final BlobStoreContainer destinationBlob;
+
+ private final NodeStoreContainer source;
+
+ private final NodeStoreContainer destination;
+
+ public AzureToFbsTest() throws IOException {
+ Assume.assumeTrue(AZURE_PROPERTIES != null && !AZURE_PROPERTIES.isEmpty());
+ sourceBlob = new AzureDataStoreContainer(AZURE_PROPERTIES);
+ destinationBlob = new FileBlobStoreContainer();
+ source = new SegmentTarNodeStoreContainer(sourceBlob);
+ destination = new SegmentTarNodeStoreContainer(destinationBlob);
+ }
+
+ @Override
+ protected NodeStoreContainer getSourceContainer() {
+ return source;
+ }
+
+ @Override
+ protected NodeStoreContainer getDestinationContainer() {
+ return destination;
+ }
+
+ @Override
+ protected String[] getArgs() {
+
+ // Don't pass '--src-azuredatastore'; as discussed in OAK-6632, only the Azure config is required
+ return new String[] { "--copy-binaries", "--src-azureconfig",
+ AZURE_PROPERTIES, "--fileblobstore", destinationBlob.getDescription(), source.getDescription(),
+ destination.getDescription() };
+ }
+
+ @Override
+ protected boolean supportsCheckpointMigration() {
+ return true;
+ }
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToInlinedTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToInlinedTest.java
new file mode 100644
index 0000000000..066a0619b8
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToInlinedTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import ch.qos.logback.classic.Level;
+import org.apache.jackrabbit.oak.commons.junit.LogCustomizer;
+import org.apache.jackrabbit.oak.plugins.blob.UploadStagingCache;
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.AzureDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class AzureToInlinedTest extends AbstractOak2OakTest {
+ private static final String AZURE_PROPERTIES = System.getProperty("azure.properties");
+
+ private final NodeStoreContainer source;
+
+ private final NodeStoreContainer destination;
+
+ private final BlobStoreContainer sourceBlob;
+
+ private LogCustomizer customLogs;
+
+ public AzureToInlinedTest() throws IOException {
+ Assume.assumeTrue(AZURE_PROPERTIES != null && !AZURE_PROPERTIES.isEmpty());
+ sourceBlob = new AzureDataStoreContainer(AZURE_PROPERTIES);
+ source = new SegmentTarNodeStoreContainer(sourceBlob);
+ destination = new SegmentTarNodeStoreContainer();
+ }
+
+ @Override
+ protected NodeStoreContainer getSourceContainer() {
+ return source;
+ }
+
+ @Override
+ protected NodeStoreContainer getDestinationContainer() {
+ return destination;
+ }
+
+ @Override
+ protected String[] getArgs() {
+ return new String[] { "--copy-binaries", "--src-azuredatastore", sourceBlob.getDescription(), "--src-azureconfig",
+ AZURE_PROPERTIES, source.getDescription(), destination.getDescription() };
+ }
+
+ @Override
+ protected boolean supportsCheckpointMigration() {
+ return true;
+ }
+
+ @Before
+ public void prepare() throws Exception {
+ // Capture logs
+ customLogs = LogCustomizer
+ .forLogger(UploadStagingCache.class.getName())
+ .enable(Level.INFO)
+ .filter(Level.INFO)
+ .contains("Uploads in progress on close [0]")
+ .create();
+ customLogs.starting();
+ super.prepare();
+ }
+
+ /**
+ * Tests whether all the Azure uploads finished
+ */
+ @Test
+ public void testAsyncUploadFinished() {
+ assertEquals(1, customLogs.getLogs().size());
+ customLogs.finished();
+ }
+
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToS3Test.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToS3Test.java
new file mode 100644
index 0000000000..57379fc8b7
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/AzureToS3Test.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.S3DataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.AzureDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+
+public class AzureToS3Test extends AbstractOak2OakTest {
+ private static final String AZURE_PROPERTIES = System.getProperty("azure.properties");
+ private static final String S3_PROPERTIES = System.getProperty("s3.properties");
+
+ private final BlobStoreContainer sourceBlob;
+
+ private final BlobStoreContainer destinationBlob;
+
+ private final NodeStoreContainer source;
+
+ private final NodeStoreContainer destination;
+
+ public AzureToS3Test() throws IOException {
+ Assume.assumeTrue(AZURE_PROPERTIES != null && !AZURE_PROPERTIES.isEmpty());
+ Assume.assumeTrue(S3_PROPERTIES != null && !S3_PROPERTIES.isEmpty());
+
+ sourceBlob = new AzureDataStoreContainer(AZURE_PROPERTIES);
+ destinationBlob = new S3DataStoreContainer(S3_PROPERTIES);
+ source = new SegmentTarNodeStoreContainer(sourceBlob);
+ destination = new SegmentTarNodeStoreContainer(destinationBlob);
+ }
+
+ @Override
+ protected NodeStoreContainer getSourceContainer() {
+ return source;
+ }
+
+ @Override
+ protected NodeStoreContainer getDestinationContainer() {
+ return destination;
+ }
+
+ @Override
+ protected String[] getArgs() {
+
+ // Don't pass '--src-azuredatastore'; as discussed in OAK-6632, only the Azure config is required
+ return new String[] { "--copy-binaries",
+ "--src-azureconfig", AZURE_PROPERTIES,
+ "--s3datastore", destinationBlob.getDescription(),
+ "--s3config", S3_PROPERTIES,
+ source.getDescription(),
+ destination.getDescription() };
+ }
+
+ @Override
+ protected boolean supportsCheckpointMigration() {
+ return true;
+ }
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FbsToAzureTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FbsToAzureTest.java
new file mode 100644
index 0000000000..79d4b7e3b9
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/FbsToAzureTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.FileBlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.AzureDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+
+public class FbsToAzureTest extends AbstractOak2OakTest {
+
+ private static final String AZURE_PROPERTIES = System.getProperty("azure.properties");
+
+ private final BlobStoreContainer sourceBlob;
+
+ private final BlobStoreContainer destinationBlob;
+
+ private final NodeStoreContainer source;
+
+ private final NodeStoreContainer destination;
+
+ public FbsToAzureTest() throws IOException {
+ Assume.assumeTrue(AZURE_PROPERTIES != null && !AZURE_PROPERTIES.isEmpty());
+ sourceBlob = new FileBlobStoreContainer();
+ destinationBlob = new AzureDataStoreContainer(AZURE_PROPERTIES);
+ source = new SegmentTarNodeStoreContainer(sourceBlob);
+ destination = new SegmentTarNodeStoreContainer(destinationBlob);
+ }
+
+ @Override
+ protected NodeStoreContainer getSourceContainer() {
+ return source;
+ }
+
+ @Override
+ protected NodeStoreContainer getDestinationContainer() {
+ return destination;
+ }
+
+ @Override
+ protected String[] getArgs() {
+ return new String[] { "--copy-binaries", "--src-fileblobstore", sourceBlob.getDescription(), "--azuredatastore",
+ destinationBlob.getDescription(), "--azureconfig", AZURE_PROPERTIES, source.getDescription(),
+ destination.getDescription() };
+ }
+
+ @Override
+ protected boolean supportsCheckpointMigration() {
+ return true;
+ }
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/S3ToAzureTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/S3ToAzureTest.java
new file mode 100644
index 0000000000..b502e59598
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/blob/S3ToAzureTest.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.blob;
+
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.upgrade.cli.AbstractOak2OakTest;
+import org.apache.jackrabbit.oak.upgrade.cli.container.BlobStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.S3DataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.AzureDataStoreContainer;
+import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer;
+import org.junit.Assume;
+
+public class S3ToAzureTest extends AbstractOak2OakTest {
+
+ private static final String S3_PROPERTIES = System.getProperty("s3.properties");
+ private static final String AZURE_PROPERTIES = System.getProperty("azure.properties");
+
+ private final BlobStoreContainer sourceBlob;
+
+ private final BlobStoreContainer destinationBlob;
+
+ private final NodeStoreContainer source;
+
+ private final NodeStoreContainer destination;
+
+ public S3ToAzureTest() throws IOException {
+ Assume.assumeTrue(S3_PROPERTIES != null && !S3_PROPERTIES.isEmpty());
+ Assume.assumeTrue(AZURE_PROPERTIES != null && !AZURE_PROPERTIES.isEmpty());
+ sourceBlob = new S3DataStoreContainer(S3_PROPERTIES);
+ destinationBlob = new AzureDataStoreContainer(AZURE_PROPERTIES);
+ source = new SegmentTarNodeStoreContainer(sourceBlob);
+ destination = new SegmentTarNodeStoreContainer(destinationBlob);
+ }
+
+ @Override
+ protected NodeStoreContainer getSourceContainer() {
+ return source;
+ }
+
+ @Override
+ protected NodeStoreContainer getDestinationContainer() {
+ return destination;
+ }
+
+ @Override
+ protected String[] getArgs() {
+ return new String[] { "--copy-binaries", "--src-s3datastore", sourceBlob.getDescription(), "--src-s3config",
+ S3_PROPERTIES, "--azuredatastore", destinationBlob.getDescription(), "--azureconfig", AZURE_PROPERTIES,
+ source.getDescription(),
+ destination.getDescription() };
+ }
+
+ @Override
+ protected boolean supportsCheckpointMigration() {
+ return true;
+ }
+}
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/AzureDataStoreContainer.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/AzureDataStoreContainer.java
new file mode 100644
index 0000000000..83cd276427
--- /dev/null
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/AzureDataStoreContainer.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.upgrade.cli.container;
+
+import static org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer.deleteRecursive;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
+import org.apache.jackrabbit.oak.upgrade.cli.blob.AzureDataStoreFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.io.Closer;
+
+public class AzureDataStoreContainer implements BlobStoreContainer {
+
+ private static final Logger log = LoggerFactory.getLogger(AzureDataStoreContainer.class);
+
+ private final File directory;
+
+ private final AzureDataStoreFactory factory;
+
+ private final Closer closer;
+
+ public AzureDataStoreContainer(String configFile) throws IOException {
+ this.directory = Files.createTempDirectory(Paths.get("target"), "repo-azure").toFile();
+ this.factory = new AzureDataStoreFactory(configFile, directory.getPath(), false);
+ this.closer = Closer.create();
+ }
+
+ @Override
+ public BlobStore open() throws IOException {
+ return factory.create(closer);
+ }
+
+ @Override
+ public void close() {
+ try {
+ closer.close();
+ } catch (IOException e) {
+ log.error("Can't close store", e);
+ }
+ }
+
+ @Override
+ public void clean() throws IOException {
+ deleteRecursive(directory);
+ }
+
+ @Override
+ public String getDescription() {
+ return directory.getPath();
+ }
+
+}
\ No newline at end of file