diff --git oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java index c9bfda9bad..468fd14eba 100644 --- oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java +++ oak-segment-azure/src/main/java/org/apache/jackrabbit/oak/segment/azure/AzureUtilities.java @@ -20,6 +20,8 @@ import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.BlobListingDetails; import com.microsoft.azure.storage.blob.CloudBlob; import com.microsoft.azure.storage.blob.CloudBlobDirectory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URISyntaxException; @@ -27,7 +29,6 @@ import java.nio.ByteBuffer; import java.nio.file.Paths; import java.util.EnumSet; import java.util.UUID; -import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -35,6 +36,8 @@ public final class AzureUtilities { public static String SEGMENT_FILE_NAME_PATTERN = "^([0-9a-f]{4})\\.([0-9a-f-]+)$"; + private static final Logger log = LoggerFactory.getLogger(AzureUtilities.class); + private AzureUtilities() { } @@ -75,4 +78,15 @@ public final class AzureUtilities { throw new IOException(e); } } + + public static void deleteAllEntries(CloudBlobDirectory directory) throws IOException { + Stream blobs = getBlobs(directory); + blobs.forEach(b -> { + try { + b.deleteIfExists(); + } catch (StorageException e) { + log.error("Can't delete blob {}", b.getUri().getPath(), e); + } + }); + } } diff --git oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzuriteDockerRule.java oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzuriteDockerRule.java index 7e957d5fb3..0b4cac88e9 100644 --- oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzuriteDockerRule.java +++ 
oak-segment-azure/src/test/java/org/apache/jackrabbit/oak/segment/azure/AzuriteDockerRule.java @@ -50,7 +50,7 @@ public class AzuriteDockerRule implements TestRule { } public CloudBlobContainer getContainer(String name) throws URISyntaxException, StorageException, InvalidKeyException { - int mappedPort = wrappedRule.getContainer().getPortBinding("10000/tcp").getPort(); + int mappedPort = getMappedPort(); CloudStorageAccount cloud = CloudStorageAccount.parse("DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:" + mappedPort + "/devstoreaccount1;"); CloudBlobContainer container = cloud.createCloudBlobClient().getContainerReference(name); container.deleteIfExists(); @@ -70,4 +70,8 @@ public class AzuriteDockerRule implements TestRule { return wrappedRule.apply(statement, description); } + + public int getMappedPort() { + return wrappedRule.getContainer().getPortBinding("10000/tcp").getPort(); + } } diff --git oak-upgrade/pom.xml oak-upgrade/pom.xml index 9cad86d3ad..92e89689e6 100644 --- oak-upgrade/pom.xml +++ oak-upgrade/pom.xml @@ -51,6 +51,17 @@ shade + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + false @@ -95,6 +106,11 @@ oak-segment-tar ${project.version} + + org.apache.jackrabbit + oak-segment-azure + ${project.version} + org.apache.jackrabbit oak-store-document @@ -187,7 +203,20 @@ test-jar test + + org.apache.jackrabbit + oak-segment-azure + ${project.version} + test-jar + test + + + com.arakelian + docker-junit-rule + 2.1.0 + test + org.hamcrest hamcrest-all diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java index 354eb5eeda..069b9732c4 100755 --- oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java +++ 
oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/RepositorySidegrade.java @@ -34,7 +34,6 @@ import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.plugins.migration.FilteringNodeState; -import org.apache.jackrabbit.oak.upgrade.nodestate.NameFilteringNodeState; import org.apache.jackrabbit.oak.plugins.migration.NodeStateCopier; import org.apache.jackrabbit.oak.plugins.migration.report.LoggingReporter; import org.apache.jackrabbit.oak.plugins.migration.report.ReportingNodeState; @@ -53,8 +52,9 @@ import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.NodeStore; import org.apache.jackrabbit.oak.upgrade.RepositoryUpgrade.LoggingCompositeHook; import org.apache.jackrabbit.oak.upgrade.checkpoint.CheckpointRetriever; -import org.apache.jackrabbit.oak.upgrade.cli.node.SegmentTarFactory; +import org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils; import org.apache.jackrabbit.oak.upgrade.nodestate.MetadataExposingNodeState; +import org.apache.jackrabbit.oak.upgrade.nodestate.NameFilteringNodeState; import org.apache.jackrabbit.oak.upgrade.version.VersionCopyConfiguration; import org.apache.jackrabbit.oak.upgrade.version.VersionHistoryUtil; import org.apache.jackrabbit.oak.upgrade.version.VersionableEditor; @@ -176,8 +176,8 @@ public class RepositorySidegrade { this.target = target; FileStore fs = null; - if (target instanceof SegmentTarFactory.NodeStoreWithFileStore) { - fs = ((SegmentTarFactory.NodeStoreWithFileStore) target).getFileStore(); + if (target instanceof FileStoreUtils.NodeStoreWithFileStore) { + fs = ((FileStoreUtils.NodeStoreWithFileStore) target).getFileStore(); } this.targetFileStore = fs; } diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java 
oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java index 288a814948..1a9c00d841 100644 --- oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/checkpoint/CheckpointRetriever.java @@ -28,7 +28,7 @@ import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.NodeStore; -import org.apache.jackrabbit.oak.upgrade.cli.node.SegmentTarFactory; +import org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils; import org.jetbrains.annotations.Nullable; import java.util.Collections; @@ -78,8 +78,8 @@ public final class CheckpointRetriever { result = getCheckpoints(org.apache.jackrabbit.oak.plugins.segment.CheckpointAccessor.getCheckpointsRoot((org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore) nodeStore)); } else if (nodeStore instanceof DocumentNodeStore) { result = DocumentCheckpointRetriever.getCheckpoints((DocumentNodeStore) nodeStore); - } else if (nodeStore instanceof SegmentTarFactory.NodeStoreWithFileStore) { - result = getCheckpoints(CheckpointAccessor.getCheckpointsRoot(((SegmentTarFactory.NodeStoreWithFileStore) nodeStore).getNodeStore())); + } else if (nodeStore instanceof FileStoreUtils.NodeStoreWithFileStore) { + result = getCheckpoints(CheckpointAccessor.getCheckpointsRoot(((FileStoreUtils.NodeStoreWithFileStore) nodeStore).getNodeStore())); } else { return null; } diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/FileStoreUtils.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/FileStoreUtils.java new file mode 100644 index 0000000000..adbc61ecdd --- /dev/null +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/FileStoreUtils.java @@ -0,0 +1,102 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.upgrade.cli.node; + +import java.io.Closeable; +import java.io.IOException; + +import org.apache.jackrabbit.oak.segment.RecordType; +import org.apache.jackrabbit.oak.segment.Segment; +import org.apache.jackrabbit.oak.segment.SegmentId; +import org.apache.jackrabbit.oak.segment.SegmentNodeStore; +import org.apache.jackrabbit.oak.segment.file.FileStore; +import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore; +import org.apache.jackrabbit.oak.spi.state.ProxyNodeStore; + +public class FileStoreUtils { + private FileStoreUtils() { + + } + + public static Closeable asCloseable(final ReadOnlyFileStore fs) { + return new Closeable() { + @Override + public void close() throws IOException { + fs.close(); + } + }; + } + + public static Closeable asCloseable(final FileStore fs) { + return new Closeable() { + @Override + public void close() throws IOException { + fs.close(); + } + }; + } + + public static boolean hasExternalBlobReferences(ReadOnlyFileStore fs) { + try { + for (SegmentId id : fs.getSegmentIds()) { + if (!id.isDataSegmentId()) { + continue; + } + id.getSegment().forEachRecord(new Segment.RecordConsumer() { + @Override + public void consume(int 
number, RecordType type, int offset) { + // FIXME the consumer should allow to stop processing + // see java.nio.file.FileVisitor + if (type == RecordType.BLOB_ID) { + throw new ExternalBlobFound(); + } + } + }); + } + return false; + } catch (ExternalBlobFound e) { + return true; + } finally { + fs.close(); + } + } + + private static class ExternalBlobFound extends RuntimeException { + private static final long serialVersionUID = 1L; + } + + public static class NodeStoreWithFileStore extends ProxyNodeStore { + + private final SegmentNodeStore segmentNodeStore; + + private final FileStore fileStore; + + public NodeStoreWithFileStore(SegmentNodeStore segmentNodeStore, FileStore fileStore) { + this.segmentNodeStore = segmentNodeStore; + this.fileStore = fileStore; + } + + public FileStore getFileStore() { + return fileStore; + } + + @Override + public SegmentNodeStore getNodeStore() { + return segmentNodeStore; + } + } +} diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentAzureFactory.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentAzureFactory.java new file mode 100644 index 0000000000..99ac19ae26 --- /dev/null +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentAzureFactory.java @@ -0,0 +1,185 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.upgrade.cli.node; + +import static org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils.asCloseable; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.security.InvalidKeyException; + +import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; +import org.apache.jackrabbit.oak.segment.azure.AzurePersistence; +import org.apache.jackrabbit.oak.segment.file.FileStore; +import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; +import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException; +import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore; +import org.apache.jackrabbit.oak.spi.blob.BlobStore; +import org.apache.jackrabbit.oak.spi.state.NodeStore; +import org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils.NodeStoreWithFileStore; + +import com.google.common.io.Closer; +import com.google.common.io.Files; +import com.microsoft.azure.storage.CloudStorageAccount; +import com.microsoft.azure.storage.StorageCredentials; +import com.microsoft.azure.storage.StorageCredentialsAccountAndKey; +import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.StorageUri; +import com.microsoft.azure.storage.blob.CloudBlobContainer; + +public class SegmentAzureFactory implements NodeStoreFactory { + private final String accountName; + private final String uri; + private final String connectionString; + private final String containerName; + private final String dir; + private final boolean readOnly; + + 
public static class Builder { + private final String dir; + private final boolean readOnly; + + private String accountName; + private String uri; + private String connectionString; + private String containerName; + + public Builder(String dir, boolean readOnly) { + this.dir = dir; + this.readOnly = readOnly; + } + + public Builder accountName(String accountName) { + this.accountName = accountName; + return this; + } + + public Builder uri(String uri) { + this.uri = uri; + return this; + } + + public Builder connectionString(String connectionString) { + this.connectionString = connectionString; + return this; + } + + public Builder containerName(String containerName) { + this.containerName = containerName; + return this; + } + + public SegmentAzureFactory build() { + return new SegmentAzureFactory(this); + } + } + + public SegmentAzureFactory(Builder builder) { + this.accountName = builder.accountName; + this.uri = builder.uri; + this.connectionString = builder.connectionString; + this.containerName = builder.containerName; + this.dir = builder.dir; + this.readOnly = builder.readOnly; + } + + @Override + public NodeStore create(BlobStore blobStore, Closer closer) throws IOException { + AzurePersistence azPersistence = null; + try { + azPersistence = createAzurePersistence(); + } catch (StorageException | URISyntaxException | InvalidKeyException e) { + throw new IllegalStateException(e); + } + + FileStoreBuilder builder = FileStoreBuilder.fileStoreBuilder(Files.createTempDir()) + .withCustomPersistence(azPersistence).withMemoryMapping(false); + + if (blobStore != null) { + builder.withBlobStore(blobStore); + } + + try { + if (readOnly) { + final ReadOnlyFileStore fs; + fs = builder.buildReadOnly(); + closer.register(asCloseable(fs)); + return SegmentNodeStoreBuilders.builder(fs).build(); + } else { + final FileStore fs; + fs = builder.build(); + closer.register(asCloseable(fs)); + return new NodeStoreWithFileStore(SegmentNodeStoreBuilders.builder(fs).build(), fs); + 
} + } catch (InvalidFileStoreVersionException e) { + throw new IllegalStateException(e); + } + } + + private AzurePersistence createAzurePersistence() throws StorageException, URISyntaxException, InvalidKeyException { + AzurePersistence azPersistence = null; + + if (accountName != null && uri != null) { + String key = System.getenv("AZURE_SECRET_KEY"); + StorageCredentials credentials = new StorageCredentialsAccountAndKey(accountName, key); + StorageUri storageUri = new StorageUri(new URI(uri)); + CloudBlobContainer cloudBlobContainer = new CloudBlobContainer(storageUri, credentials); + + azPersistence = new AzurePersistence(cloudBlobContainer.getDirectoryReference(dir)); + } else if (connectionString != null && containerName != null) { + CloudStorageAccount cloud = CloudStorageAccount.parse(connectionString.toString()); + CloudBlobContainer container = cloud.createCloudBlobClient().getContainerReference(containerName); + container.createIfNotExists(); + + azPersistence = new AzurePersistence(container.getDirectoryReference(dir)); + } + + if (azPersistence == null) { + throw new IllegalArgumentException("Could not connect to Azure storage. 
Too few connection parameters specified!"); + } + + return azPersistence; + } + + @Override + public boolean hasExternalBlobReferences() throws IOException { + AzurePersistence azPersistence = null; + try { + azPersistence = createAzurePersistence(); + } catch (StorageException | URISyntaxException | InvalidKeyException e) { + throw new IllegalStateException(e); + } + + FileStoreBuilder builder = FileStoreBuilder.fileStoreBuilder(Files.createTempDir()) + .withCustomPersistence(azPersistence).withMemoryMapping(false); + + ReadOnlyFileStore fs; + try { + fs = builder.buildReadOnly(); + } catch (InvalidFileStoreVersionException e) { + throw new IOException(e); + } + + return FileStoreUtils.hasExternalBlobReferences(fs); + } + + @Override + public String toString() { + return String.format("AzureSegmentNodeStore[%s]", dir); + } +} diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentTarFactory.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentTarFactory.java index a4cddc2144..05c5e86d73 100644 --- oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentTarFactory.java +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/node/SegmentTarFactory.java @@ -17,16 +17,11 @@ package org.apache.jackrabbit.oak.upgrade.cli.node; import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder; +import static org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils.asCloseable; -import java.io.Closeable; import java.io.File; import java.io.IOException; -import com.google.common.io.Closer; -import org.apache.jackrabbit.oak.segment.RecordType; -import org.apache.jackrabbit.oak.segment.Segment; -import org.apache.jackrabbit.oak.segment.SegmentId; -import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; import org.apache.jackrabbit.oak.segment.file.FileStore; import 
org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; @@ -34,7 +29,9 @@ import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException; import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore; import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.state.NodeStore; -import org.apache.jackrabbit.oak.spi.state.ProxyNodeStore; +import org.apache.jackrabbit.oak.upgrade.cli.node.FileStoreUtils.NodeStoreWithFileStore; + +import com.google.common.io.Closer; public class SegmentTarFactory implements NodeStoreFactory { @@ -101,78 +98,15 @@ public class SegmentTarFactory implements NodeStoreFactory { } catch (InvalidFileStoreVersionException e) { throw new IOException(e); } - try { - for (SegmentId id : fs.getSegmentIds()) { - if (!id.isDataSegmentId()) { - continue; - } - id.getSegment().forEachRecord(new Segment.RecordConsumer() { - @Override - public void consume(int number, RecordType type, int offset) { - // FIXME the consumer should allow to stop processing - // see java.nio.file.FileVisitor - if (type == RecordType.BLOB_ID) { - throw new ExternalBlobFound(); - } - } - }); - } - return false; - } catch (ExternalBlobFound e) { - return true; - } finally { - fs.close(); - } + return FileStoreUtils.hasExternalBlobReferences(fs); } public File getRepositoryDir() { return dir; } - private static Closeable asCloseable(final ReadOnlyFileStore fs) { - return new Closeable() { - @Override - public void close() throws IOException { - fs.close(); - } - }; - } - - private static Closeable asCloseable(final FileStore fs) { - return new Closeable() { - @Override - public void close() throws IOException { - fs.close(); - } - }; - } - @Override public String toString() { return String.format("SegmentTarNodeStore[%s]", dir); } - - private static class ExternalBlobFound extends RuntimeException { - } - - public static class NodeStoreWithFileStore extends ProxyNodeStore { - - private final SegmentNodeStore segmentNodeStore; - - 
private final FileStore fileStore; - - public NodeStoreWithFileStore(SegmentNodeStore segmentNodeStore, FileStore fileStore) { - this.segmentNodeStore = segmentNodeStore; - this.fileStore = fileStore; - } - - public FileStore getFileStore() { - return fileStore; - } - - @Override - public SegmentNodeStore getNodeStore() { - return segmentNodeStore; - } - } } diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtils.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtils.java new file mode 100644 index 0000000000..1e42e37085 --- /dev/null +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtils.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.upgrade.cli.parser; + +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.AzureConnectionKey.*; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +/** + * Utility class for parsing Oak Segment Azure configuration (e.g. connection + * string, container name, uri, etc.) from custom encoded String or Azure + * standard URI. 
+ */ +public class AzureParserUtils { + public enum AzureConnectionKey { + DEFAULT_ENDPOINTS_PROTOCOL("DefaultEndpointsProtocol"), + ACCOUNT_NAME("AccountName"), + ACCOUNT_KEY("AccountKey"), + BLOB_ENDPOINT("BlobEndpoint"), + CONTAINER_NAME("ContainerName"), + DIRECTORY("Directory"); + + private String text; + + AzureConnectionKey(String text) { + this.text = text; + } + + public String text() { + return text; + } + } + + public static final String KEY_CONNECTION_STRING = "connectionString"; + public static final String KEY_CONTAINER_NAME = "containerName"; + public static final String KEY_ACCOUNT_NAME = "accountName"; + public static final String KEY_STORAGE_URI = "storageUri"; + public static final String KEY_DIR = "directory"; + + private AzureParserUtils() { + // prevent instantiation + } + + /** + * + * @param conn + * the connection string + * @return true if this is a custom encoded Azure connection + * String, false otherwise + */ + public static boolean isCustomAzureConnectionString(String conn) { + return conn.contains(DEFAULT_ENDPOINTS_PROTOCOL.text()); + } + + /** + * Parses a custom encoded connection string of the form (line breaks added for + * clarity): + *

+ * <pre>
+ * DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;
+ * AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;
+ * BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;
+ * ContainerName=mycontainer;
+ * Directory=mydir
+ * </pre>

+ * where the first three lines in the string represent a standard Azure + * Connection String and the last two lines are Oak Segment Azure specific + * arguments. Please note that all configuration keys are semicolon separated, except for the last entry. The order + * of keys is not important. + * + * @param conn + * the connection string + * @return parsed configuration map containing the Azure connectionString, + * containerName and dir (key names in bold) + */ + public static Map parseAzureConfigurationFromCustomConnection(String conn) { + Map tempConfig = new HashMap<>(); + + String[] connKeys = conn.split(";"); + for (AzureConnectionKey key : AzureConnectionKey.values()) { + for (String connKey : connKeys) { + if (connKey.toLowerCase().startsWith(key.text().toLowerCase())) { + tempConfig.put(key, connKey.substring(connKey.indexOf("=") + 1)); + } + } + } + + StringBuilder connectionString = new StringBuilder(); + connectionString.append(DEFAULT_ENDPOINTS_PROTOCOL.text()).append("=").append(tempConfig.get(DEFAULT_ENDPOINTS_PROTOCOL)).append(";"); + connectionString.append(ACCOUNT_NAME.text()).append("=").append(tempConfig.get(ACCOUNT_NAME)).append(";"); + connectionString.append(ACCOUNT_KEY.text()).append("=").append(tempConfig.get(ACCOUNT_KEY)).append(";"); + connectionString.append(BLOB_ENDPOINT.text()).append("=").append(tempConfig.get(BLOB_ENDPOINT)).append(";"); + + Map config = new HashMap<>(); + config.put(KEY_CONNECTION_STRING, connectionString.toString()); + config.put(KEY_CONTAINER_NAME, tempConfig.get(CONTAINER_NAME)); + config.put(KEY_DIR, tempConfig.get(DIRECTORY)); + return config; + } + + /** + * Parses a standard Azure URI in the format + * https://myaccount.blob.core.windows.net/container/repo, + * + * @param uriStr + * the Azure URI as string + * @return parsed configuration map containing accountName, storageUri and dir + * (key names in bold) + * @throws URISyntaxException if an invalid Azure URI is used + */ + public static Map 
parseAzureConfigurationFromUri(String uriStr) { + Map config = new HashMap<>(); + + URI uri = null; + try { + uri = new URI(uriStr); + } catch (URISyntaxException e) { + throw new IllegalStateException(e); + } + + String host = uri.getHost(); + String path = uri.getPath(); + String scheme = uri.getScheme(); + + int lastSlashPosPath = path.lastIndexOf('/'); + int dotPosHost = host.indexOf("."); + + String accountName = host.substring(0, dotPosHost); + String container = path.substring(0, lastSlashPosPath); + String storageUri = scheme + "://" + host + container; + String dir = path.substring(lastSlashPosPath + 1); + + config.put(KEY_ACCOUNT_NAME, accountName); + config.put(KEY_STORAGE_URI, storageUri); + config.put(KEY_DIR, dir); + + return config; + } +} diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java index 0edb6dcd0a..e31a56bb4d 100644 --- oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreArguments.java @@ -42,6 +42,8 @@ public class StoreArguments { public static final String SEGMENT_OLD_PREFIX = "segment-old:"; + public static final String SEGMENT_AZURE_PREFIX = "az:"; + private static final Logger log = LoggerFactory.getLogger(StoreArguments.class); private final MigrationOptions options; diff --git oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreType.java oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreType.java index a479b43f32..c150f0f4a7 100644 --- oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreType.java +++ oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/cli/parser/StoreType.java @@ -19,11 +19,24 @@ package org.apache.jackrabbit.oak.upgrade.cli.parser; import static 
org.apache.commons.lang.StringUtils.removeStart; import static org.apache.jackrabbit.oak.upgrade.cli.node.Jackrabbit2Factory.isJcr2Repository; import static org.apache.jackrabbit.oak.upgrade.cli.node.Jackrabbit2Factory.isRepositoryXml; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.isCustomAzureConnectionString; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.parseAzureConfigurationFromCustomConnection; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.parseAzureConfigurationFromUri; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreArguments.SEGMENT_AZURE_PREFIX; import static org.apache.jackrabbit.oak.upgrade.cli.parser.StoreArguments.SEGMENT_OLD_PREFIX; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_ACCOUNT_NAME; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_STORAGE_URI; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_CONNECTION_STRING; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_CONTAINER_NAME; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_DIR; + + +import java.util.Map; import org.apache.jackrabbit.oak.upgrade.cli.node.Jackrabbit2Factory; import org.apache.jackrabbit.oak.upgrade.cli.node.JdbcFactory; import org.apache.jackrabbit.oak.upgrade.cli.node.MongoFactory; +import org.apache.jackrabbit.oak.upgrade.cli.node.SegmentAzureFactory; import org.apache.jackrabbit.oak.upgrade.cli.node.SegmentFactory; import org.apache.jackrabbit.oak.upgrade.cli.node.SegmentTarFactory; import org.apache.jackrabbit.oak.upgrade.cli.node.StoreFactory; @@ -136,6 +149,41 @@ public enum StoreType { return true; } }, + SEGMENT_AZURE { + @Override + public boolean matches(String argument) { + return argument.startsWith("az:"); + } + + @Override + public StoreFactory createFactory(String[] paths, MigrationDirection 
direction, MigrationOptions migrationOptions) { + String path = removeStart(paths[0], SEGMENT_AZURE_PREFIX); + + if (isCustomAzureConnectionString(path)) { + // azure configuration specified through connection string + Map config = parseAzureConfigurationFromCustomConnection(path); + return new StoreFactory(new SegmentAzureFactory.Builder(config.get(KEY_DIR), direction == MigrationDirection.SRC) + .connectionString(config.get(KEY_CONNECTION_STRING)) + .containerName(config.get(KEY_CONTAINER_NAME)) + .build() + ); + } else { + // azure configuration specified through URI + Map config = parseAzureConfigurationFromUri(path); + + return new StoreFactory(new SegmentAzureFactory.Builder(config.get(KEY_DIR), direction == MigrationDirection.SRC) + .accountName(config.get(KEY_ACCOUNT_NAME)) + .uri(config.get(KEY_STORAGE_URI)) + .build() + ); + } + } + + @Override + public boolean isSupportLongNames() { + return true; + } + }, SEGMENT_TAR { @Override public boolean matches(String argument) { @@ -169,6 +217,6 @@ public enum StoreType { public abstract boolean isSupportLongNames(); public boolean isSegment() { - return this == SEGMENT || this == SEGMENT_TAR; + return this == SEGMENT || this == SEGMENT_TAR || this == SEGMENT_AZURE; } } \ No newline at end of file diff --git oak-upgrade/src/main/resources/upgrade_usage.txt oak-upgrade/src/main/resources/upgrade_usage.txt index 6e11c76fa1..f2e8675bf2 100644 --- oak-upgrade/src/main/resources/upgrade_usage.txt +++ oak-upgrade/src/main/resources/upgrade_usage.txt @@ -35,6 +35,7 @@ in-place. Old files will be moved to the repository/crx2 directory. An descriptor of the Oak node store. Possible options: * path to the segment-tar store + * az:https://myaccount.blob.core.windows.net/container/repo (don't forget to set AZURE_SECRET_KEY env variable) * segment-old:/path/to/classic/segment * jdbc:... 
(requires passing username and password as separate parameters) * mongodb://host:port/database diff --git oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentAzureToSegmentTarTest.java oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentAzureToSegmentTarTest.java new file mode 100644 index 0000000000..ba8cae7c50 --- /dev/null +++ oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentAzureToSegmentTarTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.jackrabbit.oak.upgrade.cli; + +import java.io.IOException; + +import org.apache.jackrabbit.oak.segment.azure.AzuriteDockerRule; +import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer; +import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentAzureNodeStoreContainer; +import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer; +import org.junit.ClassRule; + +public class SegmentAzureToSegmentTarTest extends AbstractOak2OakTest { + + private final NodeStoreContainer source; + + private final NodeStoreContainer destination; + + @ClassRule + public static AzuriteDockerRule azurite = new AzuriteDockerRule(); + + public SegmentAzureToSegmentTarTest() throws IOException { + source = new SegmentAzureNodeStoreContainer(azurite); + destination = new SegmentTarNodeStoreContainer(); + } + + @Override + protected NodeStoreContainer getSourceContainer() { + return source; + } + + @Override + protected NodeStoreContainer getDestinationContainer() { + return destination; + } + + @Override + protected String[] getArgs() { + return new String[] { source.getDescription(), destination.getDescription()}; + } + + @Override + protected boolean supportsCheckpointMigration() { + return true; + } +} diff --git oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentTarToSegmentAzureTest.java oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentTarToSegmentAzureTest.java new file mode 100644 index 0000000000..44b0e8ab1b --- /dev/null +++ oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/SegmentTarToSegmentAzureTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.jackrabbit.oak.upgrade.cli; + +import java.io.IOException; + +import org.apache.jackrabbit.oak.segment.azure.AzuriteDockerRule; +import org.apache.jackrabbit.oak.upgrade.cli.container.NodeStoreContainer; +import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentAzureNodeStoreContainer; +import org.apache.jackrabbit.oak.upgrade.cli.container.SegmentTarNodeStoreContainer; +import org.junit.ClassRule; + +public class SegmentTarToSegmentAzureTest extends AbstractOak2OakTest { + + private final NodeStoreContainer source; + + private final NodeStoreContainer destination; + + @ClassRule + public static AzuriteDockerRule azurite = new AzuriteDockerRule(); + + public SegmentTarToSegmentAzureTest() throws IOException { + source = new SegmentTarNodeStoreContainer(); + destination = new SegmentAzureNodeStoreContainer(azurite); + } + + @Override + protected NodeStoreContainer getSourceContainer() { + return source; + } + + @Override + protected NodeStoreContainer getDestinationContainer() { + return destination; + } + + @Override + protected String[] getArgs() { + return new String[] { source.getDescription(), destination.getDescription() }; + } + + @Override + protected boolean supportsCheckpointMigration() { + return true; + } +} diff --git oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentAzureNodeStoreContainer.java 
oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentAzureNodeStoreContainer.java new file mode 100644 index 0000000000..4706a10ea8 --- /dev/null +++ oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/container/SegmentAzureNodeStoreContainer.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.cli.container; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.security.InvalidKeyException; + +import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; +import org.apache.jackrabbit.oak.segment.azure.AzurePersistence; +import org.apache.jackrabbit.oak.segment.azure.AzureUtilities; +import org.apache.jackrabbit.oak.segment.azure.AzuriteDockerRule; +import org.apache.jackrabbit.oak.segment.file.FileStore; +import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; +import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException; +import org.apache.jackrabbit.oak.spi.state.NodeStore; + +import com.google.common.io.Files; +import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.blob.CloudBlobContainer; + +public class SegmentAzureNodeStoreContainer implements NodeStoreContainer { + private static final String AZURE_ACCOUNT_NAME = "devstoreaccount1"; + private static final String AZURE_ACCOUNT_KEY = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="; + + private final String dir; + + private final BlobStoreContainer blob; + + private final CloudBlobContainer container; + + private final int mappedPort; + + private FileStore fs; + + public SegmentAzureNodeStoreContainer(AzuriteDockerRule azurite) throws IOException { + this(azurite, null, null); + } + + public SegmentAzureNodeStoreContainer(AzuriteDockerRule azurite, String dir) throws IOException { + this(azurite, null, dir); + } + + public SegmentAzureNodeStoreContainer(AzuriteDockerRule azurite, BlobStoreContainer blob) throws IOException { + this(azurite, blob, null); + } + + private SegmentAzureNodeStoreContainer(AzuriteDockerRule azurite, BlobStoreContainer blob, String dir) + throws IOException { + this.blob = blob; + this.dir = dir == null ? 
"repository" : dir; + try { + this.container = azurite.getContainer("oak-test"); + this.mappedPort = azurite.getMappedPort(); + } catch (InvalidKeyException | URISyntaxException | StorageException e) { + throw new IOException(e); + } + } + + @Override + public NodeStore open() throws IOException { + AzurePersistence azPersistence = null; + try { + azPersistence = new AzurePersistence(container.getDirectoryReference(dir)); + } catch (URISyntaxException e) { + throw new IllegalStateException(e); + } + + FileStoreBuilder builder = FileStoreBuilder.fileStoreBuilder(Files.createTempDir()) + .withCustomPersistence(azPersistence).withMemoryMapping(false); + + if (blob != null) { + builder.withBlobStore(blob.open()); + } + + try { + fs = builder.build(); + } catch (InvalidFileStoreVersionException e) { + throw new IllegalStateException(e); + } + return SegmentNodeStoreBuilders.builder(fs).build(); + } + + @Override + public void close() { + if (fs != null) { + fs.close(); + fs = null; + } + } + + @Override + public void clean() throws IOException { + try { + AzureUtilities.deleteAllEntries(container.getDirectoryReference(dir)); + } catch (URISyntaxException e) { + throw new IOException(e); + } + } + + @Override + public String getDescription() { + StringBuilder description = new StringBuilder("az:"); + description.append("DefaultEndpointsProtocol=https;"); + description.append("AccountName=").append(AZURE_ACCOUNT_NAME).append(';'); + description.append("AccountKey=").append(AZURE_ACCOUNT_KEY).append(';'); + description.append("BlobEndpoint=http://127.0.0.1:").append(mappedPort).append("/devstoreaccount1;"); + description.append("ContainerName=").append(container.getName()).append(";"); + description.append("Directory=").append(dir); + + return description.toString(); + } + +} diff --git oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtilsTest.java 
oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtilsTest.java new file mode 100644 index 0000000000..4f0fd433b8 --- /dev/null +++ oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/cli/parser/AzureParserUtilsTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.upgrade.cli.parser; + +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_ACCOUNT_NAME; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_CONNECTION_STRING; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_CONTAINER_NAME; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_DIR; +import static org.apache.jackrabbit.oak.upgrade.cli.parser.AzureParserUtils.KEY_STORAGE_URI; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.Map; + +import org.junit.Test; + +public class AzureParserUtilsTest { + + @Test + public void testParseConnectionDetailsFromCustomConnection() throws CliArgumentException { + StringBuilder conn = new StringBuilder(); + StringBuilder connStr = new StringBuilder(); + connStr.append("DefaultEndpointsProtocol=https;"); + connStr.append("AccountName=myaccount;"); + connStr.append("AccountKey=mykey==;"); + connStr.append("BlobEndpoint=http://127.0.0.1:32806/myaccount;"); + + conn.append(connStr); + conn.append("ContainerName=oak-test;"); + conn.append("Directory=repository"); + + assertTrue(AzureParserUtils.isCustomAzureConnectionString(conn.toString())); + + Map config = AzureParserUtils.parseAzureConfigurationFromCustomConnection(conn.toString()); + assertEquals(connStr.toString(), config.get(KEY_CONNECTION_STRING)); + assertEquals("oak-test", config.get(KEY_CONTAINER_NAME)); + assertEquals("repository", config.get(KEY_DIR)); + } + + @Test + public void testParseConnectionDetailsFromCustomConnectionShuffledKeys() throws CliArgumentException { + StringBuilder conn = new StringBuilder(); + conn.append("Directory=repository;"); + conn.append("DefaultEndpointsProtocol=https;"); + conn.append("ContainerName=oak-test;"); + conn.append("AccountName=myaccount;"); + 
conn.append("BlobEndpoint=http://127.0.0.1:32806/myaccount;"); + conn.append("AccountKey=mykey=="); + + assertTrue(AzureParserUtils.isCustomAzureConnectionString(conn.toString())); + String azureConn = "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=mykey==;BlobEndpoint=http://127.0.0.1:32806/myaccount;"; + + Map config = AzureParserUtils.parseAzureConfigurationFromCustomConnection(conn.toString()); + assertEquals(azureConn, config.get(KEY_CONNECTION_STRING)); + assertEquals("oak-test", config.get(KEY_CONTAINER_NAME)); + assertEquals("repository", config.get(KEY_DIR)); + } + + @Test + public void testParseConnectionDetailsFromUri() throws CliArgumentException { + String uri = "https://myaccount.blob.core.windows.net/oak-test/repository"; + assertFalse(AzureParserUtils.isCustomAzureConnectionString(uri)); + + Map config = AzureParserUtils.parseAzureConfigurationFromUri(uri); + + assertEquals("myaccount", config.get(KEY_ACCOUNT_NAME)); + assertEquals("https://myaccount.blob.core.windows.net/oak-test", config.get(KEY_STORAGE_URI)); + assertEquals("repository", config.get(KEY_DIR)); + } +}