create(BlobStoreConfiguration config)
+ throws Exception {
+ BlobStore blobStore = null;
+ BlobStoreBuilder builder = createFactory(config).orNull();
+
+ if ((builder != null) && (config != null)) {
+ blobStore = builder.build(config).orNull();
+ }
+ return Optional.fromNullable(blobStore);
+ }
+}
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java (revision 0)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java (working copy)
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.cloud;
+
+import static org.jclouds.blobstore.options.PutOptions.Builder.multipart;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.jackrabbit.mk.blobs.AbstractBlobStore;
+import org.apache.jackrabbit.mk.util.StringUtils;
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.domain.Blob;
+import org.jclouds.io.Payload;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.common.io.ByteStreams;
+
+/**
+ * Implementation of the {@link BlobStore} to store blobs in a cloud blob store.
+ *
+ * Extends {@link AbstractBlobStore} and breaks the binary to chunks for
+ * easier management.
+ */
+public class CloudBlobStore extends AbstractBlobStore {
+    /**
+     * Logger instance.
+     */
+    private static final Logger LOG = LoggerFactory.getLogger(CloudBlobStore.class);
+
+    /** Cloud Store context */
+    private BlobStoreContext context;
+
+    /** The bucket. */
+    private String cloudContainer;
+
+    private String accessKey;
+
+    private String secretKey;
+
+    private String cloudProvider;
+
+    protected String getCloudContainer() {
+        return cloudContainer;
+    }
+
+    public void setCloudContainer(String cloudContainer) {
+        this.cloudContainer = cloudContainer;
+    }
+
+    public String getAccessKey() {
+        return accessKey;
+    }
+
+    public void setAccessKey(String accessKey) {
+        this.accessKey = accessKey;
+    }
+
+    public String getSecretKey() {
+        return secretKey;
+    }
+
+    public void setSecretKey(String secretKey) {
+        this.secretKey = secretKey;
+    }
+
+    public String getCloudProvider() {
+        return cloudProvider;
+    }
+
+    public void setCloudProvider(String cloudProvider) {
+        this.cloudProvider = cloudProvider;
+    }
+
+    /**
+     * Instantiates a connection to the cloud blob store using the configured
+     * cloud provider, credentials and container, creating the container if it
+     * does not exist yet.
+     *
+     * Requires the cloud provider, access key, secret key and container to
+     * have been set via the corresponding setters before this method is
+     * called.
+     *
+     * @throws Exception
+     *             if building the blob store context or creating the
+     *             container fails
+     */
+    public void init() throws Exception {
+        try {
+            this.context =
+                    ContextBuilder.newBuilder(cloudProvider)
+                            .credentials(accessKey, secretKey)
+                            .buildView(BlobStoreContext.class);
+            context.getBlobStore().createContainerInLocation(null, cloudContainer);
+
+            LOG.info("Using bucket: {}", cloudContainer);
+        } catch (Exception e) {
+            LOG.error("Error creating CloudBlobStore : ", e);
+            throw e;
+        }
+    }
+
+    /**
+     * Uploads the block to the cloud service.
+     */
+    @Override
+    protected void storeBlock(byte[] digest, int level, byte[] data) throws IOException {
+        Preconditions.checkNotNull(context);
+
+        String id = StringUtils.convertBytesToHex(digest);
+
+        org.jclouds.blobstore.BlobStore blobStore = context.getBlobStore();
+
+        if (!blobStore.blobExists(cloudContainer, id)) {
+            Map<String, String> metadata = Maps.newHashMap();
+            metadata.put("level", String.valueOf(level));
+
+            Blob blob = blobStore.blobBuilder(id)
+                    .payload(data)
+                    .userMetadata(metadata)
+                    .build();
+            String etag = blobStore.putBlob(cloudContainer, blob, multipart());
+            LOG.debug("Blob {} created with cloud tag : {}", id, etag);
+        } else {
+            LOG.debug("Blob {} already exists", id);
+        }
+    }
+
+    /**
+     * Reads the data from the actual cloud service.
+     */
+    @Override
+    protected byte[] readBlockFromBackend(BlockId blockId) throws Exception {
+        Preconditions.checkNotNull(context);
+
+        String id = StringUtils.convertBytesToHex(blockId.getDigest());
+
+        Blob cloudBlob = context.getBlobStore().getBlob(cloudContainer, id);
+        if (cloudBlob == null) {
+            String message = "Did not find block " + id;
+            LOG.error(message);
+            throw new IOException(message);
+        }
+
+        Payload payload = cloudBlob.getPayload();
+        try {
+            byte[] data = ByteStreams.toByteArray(payload.getInput());
+
+            if (blockId.getPos() == 0) {
+                return data;
+            }
+
+            int len = (int) (data.length - blockId.getPos());
+            if (len < 0) {
+                return new byte[0];
+            }
+            byte[] d2 = new byte[len];
+            System.arraycopy(data, (int) blockId.getPos(), d2, 0, len);
+            return d2;
+        } finally {
+            payload.close();
+        }
+    }
+
+    /**
+     * Delete the cloud container and all its contents.
+     *
+     */
+    public void deleteBucket() {
+        Preconditions.checkNotNull(context);
+
+        if (context.getBlobStore().containerExists(cloudContainer)) {
+            context.getBlobStore().deleteContainer(cloudContainer);
+        }
+        context.close();
+    }
+
+    @Override
+    public void startMark() throws IOException {
+        // No-op
+    }
+
+    @Override
+    protected void mark(BlockId id) throws Exception {
+        // No-op
+    }
+
+    @Override
+    public int sweep() throws IOException {
+        return 0;
+    }
+
+    @Override
+    protected boolean isMarkEnabled() {
+        return false;
+    }
+}
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStoreBuilder.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStoreBuilder.java (revision 0)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStoreBuilder.java (working copy)
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.cloud;
+
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBuilder;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration;
+
+import com.google.common.base.Optional;
+
+/**
+ * A factory helper for creating CloudBlobStore instance.
+ */
+public class CloudBlobStoreBuilder implements BlobStoreBuilder {
+
+    private static final CloudBlobStoreBuilder INSTANCE = new CloudBlobStoreBuilder();
+
+    public static CloudBlobStoreBuilder newInstance() {
+        return INSTANCE;
+    }
+
+    /**
+     * Creates the {@link CloudBlobStore} instance.
+     *
+     * @param configuration
+     *            the configuration used to populate and initialize the store
+     * @return the initialized blob store wrapped in an {@link Optional};
+     *         never absent, since a store is always created from the given
+     *         configuration
+     * @throws Exception
+     *             if populating the bean properties or initializing fails
+     */
+    @Override
+    public Optional build(
+            BlobStoreConfiguration configuration)
+            throws Exception {
+        CloudBlobStore blobStore = new CloudBlobStore();
+
+        // Populate provider, credentials and container from the configuration map
+        BeanUtils.populate(blobStore, configuration.getConfigMap());
+        blobStore.init();
+
+        return Optional.of((BlobStore) blobStore);
+    }
+}
\ No newline at end of file
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java (revision 0)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java (working copy)
@@ -0,0 +1,511 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.jackrabbit.core.data.CachingDataStore;
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.mk.blobs.GarbageCollectableBlobStore;
+import org.apache.jackrabbit.mk.util.Cache;
+import org.apache.jackrabbit.mk.util.IOUtils;
+import org.apache.jackrabbit.mk.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Strings;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
+
+/**
+ * A {@link BlobStore} implementation which is a compatibility wrapper for
+ * Jackrabbit {@link DataStore}.
+ *
+ * Uses a 2 level cache to improve random read performance.
+ *
+ * Caches the {@link InputStream} until fully read or closed. Number of streams
+ * cached are controlled by the
+ * {@link DataStoreConfiguration#getStreamCacheSize()} parameter
+ *
+ * Also, uses a 16MB bytes[] cache.
+ *
+ */
+public class DataStoreBlobStore implements GarbageCollectableBlobStore,
+        Cache.Backend {
+
+    /**
+     * Logger instance.
+     */
+    private static final Logger LOG = LoggerFactory.getLogger(DataStoreBlobStore.class);
+
+    protected static final int BLOCK_SIZE_LIMIT = 40;
+
+    private static final int DEFAULT_STREAM_CACHE_SIZE = 256;
+
+    /**
+     * The size of a block. 128 KB has been found to be as fast as larger
+     * values, and faster than smaller values. 2 MB results in less files.
+     */
+    private int blockSize = 2 * 1024 * 1024;
+
+    /**
+     * The block cache (16 MB). Caches blocks up to blockSize.
+     */
+    private Cache blockCache = Cache.newInstance(this, 16 * 1024 * 1024);
+
+    /** The stream cache size. */
+    protected int streamCacheSize;
+
+    /**
+     * The stream cache caches a number of streams to avoid opening a new stream
+     * on every random access read.
+     */
+    private LoadingCache<String, InputStream> streamCache;
+
+    private LoadingCache<String, Long> fileLengthCache;
+
+    /** The data store. */
+    private DataStore dataStore;
+
+    /**
+     * Gets the stream cache size.
+     *
+     * @return the stream cache size
+     */
+    protected int getStreamCacheSize() {
+        return streamCacheSize;
+    }
+
+    /**
+     * Sets the stream cache size.
+     *
+     * @param streamCacheSize
+     *            the new stream cache size
+     */
+    protected void setStreamCacheSize(int streamCacheSize) {
+        this.streamCacheSize = streamCacheSize;
+    }
+
+    /**
+     * Sets the block size.
+     *
+     * @param x
+     *            the new block size
+     */
+    public final void setBlockSize(final int x) {
+        validateBlockSize(x);
+        this.blockSize = x;
+    }
+
+    /**
+     * Validate block size.
+     *
+     * @param x
+     *            the x
+     */
+    private static void validateBlockSize(final int x) {
+        if (x < BLOCK_SIZE_LIMIT) {
+            throw new IllegalArgumentException("The minimum size must be bigger "
+                    + "than a content hash itself; limit = " + BLOCK_SIZE_LIMIT);
+        }
+    }
+
+    /**
+     * Initializes the blob store with the given backing data store. Also
+     * creates the stream and file-length caches, sized by
+     * {@link #getStreamCacheSize()} (defaulted when unset).
+     *
+     * @param dataStore
+     *            the data store
+     */
+    public void init(DataStore dataStore) {
+        if (streamCacheSize <= 0) {
+            streamCacheSize = DEFAULT_STREAM_CACHE_SIZE;
+        }
+
+        streamCache = CacheBuilder.newBuilder().maximumSize(streamCacheSize)
+                .removalListener(new RemovalListener<String, InputStream>() {
+                    public void onRemoval(RemovalNotification<String, InputStream> removal) {
+                        InputStream stream = removal.getValue();
+                        IOUtils.closeQuietly(stream);
+                    }
+                }).build(new CacheLoader<String, InputStream>() {
+                    public InputStream load(String key) throws Exception {
+                        return loadStream(key);
+                    }
+                });
+        fileLengthCache = CacheBuilder.newBuilder().maximumSize(streamCacheSize)
+                .build(new CacheLoader<String, Long>() {
+                    @Override
+                    public Long load(String key) throws Exception {
+                        return getBlobLength(key);
+                    }
+                });
+        this.dataStore = dataStore;
+    }
+
+    /**
+     * Writes the input stream to the data store.
+     */
+    @Override
+    public String writeBlob(InputStream in) throws IOException {
+        try {
+            // add the record in the data store
+            DataRecord dataRec = dataStore.addRecord(in);
+            return dataRec.getIdentifier().toString();
+        } catch (DataStoreException e) {
+            throw new IOException(e);
+        } finally {
+            IOUtils.closeQuietly(in);
+        }
+    }
+
+    /**
+     * Reads the blob with the given blob id and range.
+     */
+    @Override
+    public int readBlob(String blobId, long pos, byte[] buff, int off, int length) throws IOException {
+        if (Strings.isNullOrEmpty(blobId)) {
+            return -1;
+        }
+
+        long blobLength;
+        try {
+            blobLength = fileLengthCache.get(blobId);
+        } catch (ExecutionException e) {
+            LOG.debug("File length cache error", e);
+            blobLength = getBlobLength(blobId);
+        }
+        LOG.debug("read {}, {}", blobId, blobLength);
+
+        long position = pos;
+        int offset = off;
+
+        if (position < blobLength) {
+            int totalLength = 0;
+            long bytesLeft = ((position + length) > blobLength ? blobLength - position : length);
+
+            // Reads all the logical blocks satisfying the required range
+            while (bytesLeft > 0) {
+                long posBlockStart = position / blockSize;
+                int posOffsetInBlock = (int) (position - posBlockStart * blockSize);
+
+                byte[] block = readBlock(blobId, posBlockStart);
+
+                // bytesLeft is already capped at the blob end, so only the block boundary caps here
+                long bytesToRead = Math.min(bytesLeft, blockSize - posOffsetInBlock);
+                System.arraycopy(block, posOffsetInBlock, buff, offset, (int) bytesToRead);
+
+                position += bytesToRead;
+                offset += bytesToRead;
+                totalLength += bytesToRead;
+                bytesLeft -= bytesToRead;
+            }
+            return totalLength;
+        } else {
+            LOG.trace("Blob read for pos " + pos + "," + (pos + length - 1) + " out of range");
+            return -1;
+        }
+    }
+
+    /**
+     * Gets the data store.
+     *
+     * @return the data store
+     */
+    public DataStore getDataStore() {
+        return dataStore;
+    }
+
+    /**
+     * Sets the data store.
+     *
+     * @param dataStore
+     *            the data store
+     */
+    protected void setDataStore(DataStore dataStore) {
+        this.dataStore = dataStore;
+    }
+
+    /**
+     * Load the block to the cache.
+     */
+    @Override
+    public final Data load(final LogicalBlockId id) {
+        byte[] data;
+        try {
+            data = readBlockFromBackend(id);
+        } catch (Exception e) {
+            throw new RuntimeException("failed to read block from backend, id " + id, e);
+        }
+        if (data == null) {
+            throw new IllegalArgumentException("The block with id " + id + " was not found");
+        }
+        LOG.debug("Read from backend (Cache Miss): {}", id);
+        return new Data(data);
+    }
+
+    /**
+     * Gets the length of the blob identified by the blobId.
+     */
+    @Override
+    public final long getBlobLength(final String blobId) throws IOException {
+        if (Strings.isNullOrEmpty(blobId)) {
+            return 0;
+        }
+
+        Long length = null;
+        try {
+            if (dataStore instanceof CachingDataStore) {
+                length = ((CachingDataStore) dataStore).getLength(new DataIdentifier(blobId));
+            } else {
+                length = dataStore.getRecord(new DataIdentifier(blobId)).getLength();
+            }
+            return length;
+        } catch (DataStoreException e) {
+            throw new IOException("Could not get length of blob for id " + blobId, e);
+        }
+    }
+
+    /**
+     * Reads a full block for the given id from the cached backend stream.
+     *
+     * @param id
+     *            the logical block id
+     * @return a buffer of {@code blockSize} bytes (zero-padded at blob end)
+     * @throws IOException
+     *             if the stream cannot be retrieved or read
+     */
+    private byte[] readBlockFromBackend(final LogicalBlockId id) throws IOException {
+        String key = StringUtils.convertBytesToHex(id.digest);
+        InputStream stream;
+        try {
+            stream = streamCache.get(key);
+        } catch (ExecutionException e) {
+            // Fail fast instead of proceeding with a null stream (previously an NPE)
+            throw new IOException("Error retrieving stream for : " + key, e);
+        }
+
+        byte[] block = new byte[blockSize];
+        org.apache.commons.io.IOUtils.read(stream, block, 0, blockSize); // NOTE(review): assumes a blob's blocks are read sequentially from the cached stream — verify
+
+        if (stream.available() <= 0) {
+            streamCache.invalidate(key);
+        }
+        return block;
+    }
+
+    /**
+     * Loads the stream from the data store.
+     *
+     * @param key
+     *            the key
+     * @return the input stream
+     * @throws IOException
+     *             Signals that an I/O exception has occurred.
+     */
+    private InputStream loadStream(String key) throws IOException {
+        InputStream stream = null;
+        try {
+            stream = dataStore.getRecord(new DataIdentifier(key)).getStream();
+        } catch (DataStoreException e) {
+            throw new IOException("Could not read blob for id " + key, e);
+        }
+        return stream;
+    }
+
+    /**
+     * Reads a block, serving it from the block cache when possible.
+     *
+     * @param blobId
+     *            the blob id
+     * @param posStart
+     *            the starting position of the block within the blob
+     * @return the block data
+     * @throws IOException
+     *             if reading the block from the backend fails
+     */
+    private byte[] readBlock(final String blobId, final long posStart) throws IOException {
+        byte[] digest = StringUtils.convertHexToBytes(blobId);
+        LogicalBlockId id = new LogicalBlockId(digest, posStart);
+
+        LOG.debug("Trying to read from cache : {}, {}", blobId, posStart);
+
+        return blockCache.get(id).data;
+    }
+
+    /**
+     * Delete all blobs older than.
+     *
+     * @param time
+     *            the time
+     * @return the int
+     * @throws Exception
+     *             the exception
+     */
+    public int deleteAllOlderThan(long time) throws Exception {
+        return dataStore.deleteAllOlderThan(time);
+    }
+
+    /**
+     * A file is divided into logical chunks. Blocks are small enough to fit in
+     * memory, so they can be cached.
+     */
+    public static class LogicalBlockId {
+
+        /** The digest. */
+        final byte[] digest;
+
+        /** The starting pos. */
+        final long pos;
+
+        /**
+         * Instantiates a new logical block id.
+         *
+         * @param digest
+         *            the digest
+         * @param pos
+         *            the starting position of the block
+         */
+        LogicalBlockId(final byte[] digest, final long pos) {
+            this.digest = digest;
+            this.pos = pos;
+        }
+
+        @Override
+        public final boolean equals(final Object other) {
+            if (this == other) {
+                return true;
+            }
+            if (!(other instanceof LogicalBlockId)) {
+                return false;
+            }
+            LogicalBlockId o = (LogicalBlockId) other;
+            return Arrays.equals(digest, o.digest) && pos == o.pos;
+        }
+
+        @Override
+        public final int hashCode() {
+            return Arrays.hashCode(digest) ^ (int) (pos >>> 32) ^ (int) pos;
+        }
+
+        @Override
+        public final String toString() {
+            return StringUtils.convertBytesToHex(digest) + "@" + pos;
+        }
+
+        /**
+         * Gets the digest.
+         *
+         * @return the digest
+         */
+        public final byte[] getDigest() {
+            return digest;
+        }
+
+        /**
+         * Gets the starting position.
+         *
+         * @return the starting position
+         */
+        public final long getPos() {
+            return pos;
+        }
+    }
+
+    /**
+     * The data for a block.
+     */
+    public static class Data implements Cache.Value {
+
+        /** The data. */
+        final byte[] data;
+
+        /**
+         * Instantiates a new data.
+         *
+         * @param data
+         *            the data
+         */
+        Data(final byte[] data) {
+            this.data = data;
+        }
+
+        @Override
+        public final String toString() {
+            String s = StringUtils.convertBytesToHex(data);
+            return s.length() > 100 ? s.substring(0, 100) + ".. (len=" + data.length + ")" : s;
+        }
+
+        @Override
+        public final int getMemory() {
+            return data.length;
+        }
+    }
+
+    @Override
+    public String writeBlob(String tempFileName) throws IOException {
+        File file = new File(tempFileName);
+        InputStream in = null;
+        try {
+            in = new FileInputStream(file);
+            return writeBlob(in);
+        } finally {
+            if (in != null) {
+                in.close();
+            }
+            file.delete(); // best-effort cleanup; result intentionally ignored
+        }
+    }
+
+    @Override
+    public int sweep() throws IOException {
+        // no-op
+        return 0;
+    }
+
+    @Override
+    public void startMark() throws IOException {
+    }
+
+    @Override
+    public void clearInUse() {
+        dataStore.clearInUse();
+    }
+
+    @Override
+    public void clearCache() {
+        // no-op
+    }
+
+    @Override
+    public long getBlockSizeMin() {
+        // no-op
+        return 0;
+    }
+}
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreBuilder.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreBuilder.java (revision 0)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStoreBuilder.java (working copy)
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.jackrabbit.core.data.Backend;
+import org.apache.jackrabbit.core.data.CachingDataStore;
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.FileDataStore;
+import org.apache.jackrabbit.core.data.MultiDataStore;
+import org.apache.jackrabbit.core.data.db.DbDataStore;
+import org.apache.jackrabbit.core.util.db.ConnectionFactory;
+import org.apache.jackrabbit.mk.blobs.BlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBuilder;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration;
+
+import com.google.common.base.Optional;
+
+/**
+ * Helper class to create {@link DataStoreBlobStore} instance and inject the
+ * appropriate Jackrabbit {@link DataStore} instance based on the configuration.
+ */
+public class DataStoreBlobStoreBuilder implements BlobStoreBuilder {
+
+    private static final DataStoreBlobStoreBuilder INSTANCE = new DataStoreBlobStoreBuilder();
+
+    public static DataStoreBlobStoreBuilder newInstance() {
+        return INSTANCE;
+    }
+
+    /**
+     * Creates the wrapper {@link BlobStore} instance for Jackrabbit
+     * {@link DataStore}.
+     *
+     * @param configuration
+     *            the configuration
+     * @return the data store blob store wrapped as {@link Optional}, absent
+     *         when a valid configuration is not available
+     * @throws Exception
+     *             the exception
+     */
+    @Override
+    public Optional build(BlobStoreConfiguration configuration) throws Exception {
+        BlobStore blobStore = null;
+
+        DataStore store = getDataStore(configuration);
+        if (store != null) {
+            blobStore = new DataStoreBlobStore();
+            BeanUtils.populate(blobStore, configuration.getConfigMap());
+            ((DataStoreBlobStore) blobStore).init(store);
+        }
+        return Optional.fromNullable(blobStore);
+    }
+
+    /**
+     * Gets the data store instance based on the type configured via
+     * {@link BlobStoreConfiguration#PROP_DATA_STORE}.
+     *
+     * @param config
+     *            the blob store configuration
+     * @return the configured and initialized data store
+     * @throws Exception
+     *             if the data store class cannot be instantiated, populated
+     *             or initialized
+     */
+    private DataStore getDataStore(BlobStoreConfiguration config) throws Exception {
+        return getDataStore(
+                (String) config.getProperty(BlobStoreConfiguration.PROP_DATA_STORE), config);
+    }
+
+    private DataStore getDataStore(String dataStoreType, BlobStoreConfiguration config) throws Exception {
+        DataStore dataStore = (DataStore) Class.forName(dataStoreType).newInstance();
+        BeanUtils.populate(dataStore, config.getConfigMap());
+
+        if (dataStore instanceof DbDataStore) {
+            ((DbDataStore) dataStore)
+                    .setConnectionFactory(new ConnectionFactory());
+        }
+
+        if (dataStore instanceof MultiDataStore) {
+            DataStore primary =
+                    getDataStore(
+                            (String) config.getProperty(BlobStoreConfiguration.PRIMARY_DATA_STORE), config);
+            DataStore archive =
+                    getDataStore(
+                            (String) config.getProperty(BlobStoreConfiguration.ARCHIVE_DATA_STORE), config);
+            ((MultiDataStore) dataStore)
+                    .setPrimaryDataStore(primary);
+            ((MultiDataStore) dataStore)
+                    .setArchiveDataStore(archive);
+            dataStore.init(null);
+        } else if (!(dataStore instanceof FileDataStore)
+                && !(dataStore instanceof CachingDataStore)) {
+            dataStore.init(null);
+            // Wrap plain data stores to add local file caching
+            return wrapInCachingDataStore(dataStore, config);
+        } else {
+            dataStore.init(null);
+        }
+
+        return dataStore;
+    }
+
+    private DataStore wrapInCachingDataStore(final DataStore dataStore, BlobStoreConfiguration config) throws Exception {
+        CachingDataStore cachingStore = new CachingDataStore() {
+            @Override
+            protected Backend createBackend() {
+                return new DataStoreWrapperBackend(dataStore);
+            }
+
+            @Override
+            protected String getMarkerFile() {
+                return "db.init.done";
+            }
+        };
+
+        BeanUtils.populate(cachingStore, config.getConfigMap());
+        cachingStore.init(null);
+
+        return cachingStore;
+    }
+}
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java (revision 0)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java (working copy)
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.jackrabbit.core.data.Backend;
+import org.apache.jackrabbit.core.data.CachingDataStore;
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.core.data.MultiDataStoreAware;
+import org.apache.tika.io.IOUtils;
+
+import com.google.common.collect.Lists;
+
+/**
+ * {@link Backend} wrapper over Jackrabbit {@link DataStore} which enables using
+ * a {@link CachingDataStore} for local file caching.
+ */
+public class DataStoreWrapperBackend implements Backend {
+
+    /** The data store being wrapped. */
+    private DataStore dataStore;
+
+    /**
+     * Instantiates a new data store wrapper backend.
+     *
+     * @param dataStore
+     *            the data store
+     */
+    public DataStoreWrapperBackend(DataStore dataStore) {
+        this.dataStore = dataStore;
+    }
+
+    @Override
+    public void init(CachingDataStore store, String homeDir, String config) throws DataStoreException {
+    }
+
+    @Override
+    public InputStream read(DataIdentifier identifier) throws DataStoreException {
+        return dataStore.getRecord(identifier).getStream(); // throws when missing (was an NPE via getRecordIfStored)
+    }
+
+    @Override
+    public long getLength(DataIdentifier identifier) throws DataStoreException {
+        return dataStore.getRecord(identifier).getLength();
+    }
+
+    @Override
+    public long getLastModified(DataIdentifier identifier) throws DataStoreException {
+        return dataStore.getRecord(identifier).getLastModified();
+    }
+
+    @Override
+    public void write(DataIdentifier identifier, File file) throws DataStoreException {
+        InputStream stream = null;
+        try {
+            stream = new FileInputStream(file);
+            dataStore.addRecord(stream); // NOTE(review): assumes the store derives the same identifier from the content — verify
+        } catch (IOException io) {
+            throw new DataStoreException("Error retrieving stream from : " + file.getAbsolutePath(), io);
+        } finally {
+            IOUtils.closeQuietly(stream);
+        }
+    }
+
+    @Override
+    public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
+        return dataStore.getAllIdentifiers();
+    }
+
+    @Override
+    public void touch(DataIdentifier identifier, long minModifiedDate) throws DataStoreException {
+        // currently no-op
+    }
+
+    @Override
+    public boolean exists(DataIdentifier identifier) throws DataStoreException {
+        return (dataStore.getRecordIfStored(identifier) != null);
+    }
+
+    @Override
+    public void close() throws DataStoreException {
+        dataStore.close();
+    }
+
+    @Override
+    public List<DataIdentifier> deleteAllOlderThan(long timestamp) throws DataStoreException {
+        dataStore.deleteAllOlderThan(timestamp);
+        return Lists.newArrayList();
+    }
+
+    @Override
+    public void deleteRecord(DataIdentifier identifier) throws DataStoreException {
+        if (dataStore instanceof MultiDataStoreAware) {
+            ((MultiDataStoreAware) dataStore).deleteRecord(identifier);
+        }
+    }
+}
Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java
===================================================================
--- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java (revision 1566507)
+++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java (working copy)
@@ -18,24 +18,25 @@
*/
package org.apache.jackrabbit.oak.plugins.document;
+import static org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils.registerMBean;
+
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
-import com.mongodb.DB;
-import com.mongodb.MongoClient;
-import com.mongodb.MongoClientOptions;
-import com.mongodb.MongoClientURI;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Property;
+import org.apache.jackrabbit.mk.blobs.BlobStore;
import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean;
import org.apache.jackrabbit.oak.kernel.KernelNodeStore;
import org.apache.jackrabbit.oak.osgi.ObserverTracker;
import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper;
import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
@@ -47,7 +48,11 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils.registerMBean;
+import com.google.common.base.Strings;
+import com.mongodb.DB;
+import com.mongodb.MongoClient;
+import com.mongodb.MongoClientOptions;
+import com.mongodb.MongoClientURI;
/**
* The OSGi service to start/stop a DocumentNodeStore instance.
@@ -112,6 +117,8 @@
int offHeapCache = PropertiesUtil.toInteger(prop(config, PROP_OFF_HEAP_CACHE), DEFAULT_OFF_HEAP_CACHE);
int cacheSize = PropertiesUtil.toInteger(prop(config, PROP_CACHE), DEFAULT_CACHE);
boolean useMK = PropertiesUtil.toBoolean(config.get(PROP_USE_MK), false);
+ String blobStoreType = PropertiesUtil.toString(config.get(BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER),
+ BlobStoreConfiguration.DEFAULT_BLOB_STORE_PROVIDER);
MongoClientOptions.Builder builder = MongoConnection.getDefaultBuilder();
MongoClientURI mongoURI = new MongoClientURI(uri,builder);
@@ -128,11 +135,31 @@
MongoClient client = new MongoClient(mongoURI);
DB mongoDB = client.getDB(db);
+ // Check if any valid external BlobStore is defined.
+ // If not then use the default which is MongoBlobStore
+ BlobStore blobStore = null;
+ if (!Strings.isNullOrEmpty(blobStoreType)) {
+ blobStore = BlobStoreHelper.create(
+ BlobStoreConfiguration.newInstance().
+ loadFromContextOrMap(config, context))
+ .orNull();
+ }
+
+ if (blobStore == null) {
mk = new DocumentMK.Builder()
.memoryCacheSize(cacheSize * MB)
.offHeapCacheSize(offHeapCache * MB)
.setMongoDB(mongoDB)
.open();
+ }
+ else {
+ mk = new DocumentMK.Builder()
+ .memoryCacheSize(cacheSize * MB)
+ .offHeapCacheSize(offHeapCache * MB)
+ .setMongoDB(mongoDB)
+ .setBlobStore(blobStore)
+ .open();
+ }
logger.info("Connected to database {}", mongoDB);
Index: oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties
===================================================================
--- oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties (revision 0)
+++ oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties (working copy)
@@ -0,0 +1,42 @@
+# Identify the blob store provider if different from the default
+blobStoreProvider=
+
+# Properties for CloudBlobStore
+accessKey=
+secretKey=
+cloudContainer=oakblobstore
+cloudProvider=aws-s3
+
+# Common for all data store
+dataStoreProvider=org.apache.jackrabbit.core.data.FileDataStore
+streamCacheSize=256
+path=./repository/datastore
+
+# Caching data store properties
+cacheSize=68719476736
+secret=123456789
+cachePurgeTrigFactor=0.85d
+cachePurgeResizeFactor=0.95d
+minRecordLength=16384
+config=
+
+# DbDataStore properties
+url=jdbc:postgresql:test
+user=sa
+password=sa
+databaseType=postgresql
+driver=org.postgresql.Driver
+copyWhenReading=true
+tablePrefix=
+schemaObjectPrefix=
+schemaCheckEnabled=true
+
+# MultiDataStore properties
+maxAge=60
+moveDataTaskSleep=604800
+moveDataTaskFirstRunHourOfDay=1
+sleepBetweenRecords=100
+delayedDelete=false
+delayedDeleteSleep=86400
+primary=org.apache.jackrabbit.core.data.db.DbDataStore
+archive=org.apache.jackrabbit.core.data.FileDataStore
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java (working copy)
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.cloud;
+
+import org.apache.jackrabbit.mk.blobs.AbstractBlobStore;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper;
+import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore;
+
+/**
+ * Helper class for retrieving the appropriate blobStore instance
+ */
+public class CloudStoreUtils {
+
+ /**
+ * Gets the blob store.
+ *
+ * @return the blob store
+ * @throws Exception
+ * the exception
+ */
+ protected static AbstractBlobStore getBlobStore() throws Exception {
+ BlobStoreConfiguration config =
+ BlobStoreConfiguration.newInstance().loadFromSystemProps();
+ config.addProperty(
+ BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER, CloudBlobStore.class.getName());
+ return (AbstractBlobStore) BlobStoreHelper.create(config).orNull();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java (working copy)
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.cloud;
+
+import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKGetLengthTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#getLength(String)} with {@link CloudBlobStore}
+ */
+public class DocumentMKCloudGetLengthTest extends DocumentMKGetLengthTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(CloudStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(CloudStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ ((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket();
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java (working copy)
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.cloud;
+
+import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKReadTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#read(String, long, byte[], int, int)} with
+ * {@link CloudBlobStore}
+ */
+public class DocumentMKCloudReadTest extends DocumentMKReadTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(CloudStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(CloudStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ ((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket();
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java (working copy)
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.cloud;
+
+import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKWriteTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#write(java.io.InputStream)} with
+ * {@link CloudBlobStore}
+ */
+public class DocumentMKCloudWriteTest extends DocumentMKWriteTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(CloudStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(CloudStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ ((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket();
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java (working copy)
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.cloud;
+
+import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.blob.AbstractBlobStoreTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests the {@link CloudBlobStore} implementation.
+ */
+public class MongoCloudBlobStoreTest extends AbstractBlobStoreTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(CloudStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ store = CloudStoreUtils.getBlobStore();
+ }
+
+ @Override
+ @After
+ public void tearDown() throws Exception {
+ ((CloudBlobStore) store).deleteBucket();
+ super.tearDown();
+ }
+
+ @Override
+ @Test
+ public void testGarbageCollection() throws Exception {
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java (working copy)
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.ds;
+
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration;
+import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.AbstractMongoConnectionTest;
+
+/**
+ * Helper for retrieving the {@link DataStoreBlobStore} instantiated with the
+ * appropriate {@link DataStore}.
+ */
+public class DataStoreUtils extends AbstractMongoConnectionTest {
+ public final static String PATH = "./target/repository/";
+
+ /**
+ * Gets the blob store.
+ *
+ * @return the blob store
+ * @throws Exception
+ * the exception
+ */
+ protected static DataStoreBlobStore getBlobStore() throws Exception {
+ BlobStoreConfiguration config =
+ BlobStoreConfiguration.newInstance().loadFromSystemProps();
+ config.addProperty(
+ BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER, DataStoreBlobStore.class.getName());
+ config.addProperty("path", PATH + "datastore");
+ return (DataStoreBlobStore) BlobStoreHelper.create(config).orNull();
+ }
+}
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java (working copy)
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.ds;
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKGetLengthTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#getLength(String)} using {@link DataStore}
+ */
+public class DocumentMKDataStoreGetLengthTest extends DocumentMKGetLengthTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(DataStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(DataStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ FileUtils.deleteDirectory(new File(DataStoreUtils.PATH));
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java (working copy)
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.ds;
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKReadTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#read(String, long, byte[], int, int)} using
+ * {@link DataStore}
+ */
+public class DocumentMKDataStoreReadTest extends DocumentMKReadTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(DataStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(DataStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ FileUtils.deleteDirectory(new File(DataStoreUtils.PATH));
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java (working copy)
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.ds;
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
+import org.apache.jackrabbit.oak.plugins.document.MongoUtils;
+import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKWriteTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Tests for {@code DocumentMK#write(java.io.InputStream)} using
+ * {@link DataStore}
+ */
+public class DocumentMKDataStoreWriteTest extends DocumentMKWriteTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(DataStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Override
+ @Before
+ public void setUpConnection() throws Exception {
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB())
+ .setBlobStore(DataStoreUtils.getBlobStore()).open();
+ }
+
+ @Override
+ @After
+ public void tearDownConnection() throws Exception {
+ FileUtils.deleteDirectory(new File(DataStoreUtils.PATH));
+ mk.dispose();
+ // the db might already be closed
+ mongoConnection.close();
+ mongoConnection = MongoUtils.getConnection();
+ MongoUtils.dropCollections(mongoConnection.getDB());
+ mongoConnection.close();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java
===================================================================
--- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java (revision 0)
+++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java (working copy)
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.document.blob.ds;
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
+import org.apache.jackrabbit.oak.plugins.document.blob.AbstractBlobStoreTest;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests the {@link DataStoreBlobStore} implementation.
+ */
+public class MongoDataStoreBlobStoreTest extends AbstractBlobStoreTest {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ try {
+ Assume.assumeNotNull(DataStoreUtils.getBlobStore());
+ } catch (Exception e) {
+ Assume.assumeNoException(e);
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ store = DataStoreUtils.getBlobStore();
+ }
+
+ @Override
+ @Test
+ public void testCombinedIdentifier() throws Exception {
+ }
+
+ @Override
+ @Test
+ public void testGarbageCollection() throws Exception {
+ }
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ FileUtils.deleteDirectory(new File(DataStoreUtils.PATH));
+ super.tearDown();
+ }
+}
\ No newline at end of file
Index: oak-core/src/test/resources/aws.properties
===================================================================
--- oak-core/src/test/resources/aws.properties (revision 0)
+++ oak-core/src/test/resources/aws.properties (working copy)
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# AWS account ID
+accessKey=
+# AWS secret key
+secretKey=
+# AWS bucket name
+s3Bucket=oakblobstore
+# AWS bucket region
+# Mapping of S3 regions to their constants
+# US Standard us-standard
+# US West us-west-2
+# US West (Northern California) us-west-1
+# EU (Ireland) EU
+# Asia Pacific (Singapore) ap-southeast-1
+# Asia Pacific (Sydney) ap-southeast-2
+# Asia Pacific (Tokyo) ap-northeast-1
+# South America (Sao Paulo) sa-east-1
+s3Region=us-standard
+connectionTimeout=120000
+socketTimeout=120000
+maxConnections=10
+maxErrorRetry=10
Index: oak-it/mk/pom.xml
===================================================================
--- oak-it/mk/pom.xml (revision 1566507)
+++ oak-it/mk/pom.xml (working copy)
@@ -108,6 +108,18 @@
com.google.guava
guava