Index: oak-core/pom.xml =================================================================== --- oak-core/pom.xml (revision 1566507) +++ oak-core/pom.xml (working copy) @@ -220,6 +220,11 @@ commons-codec 1.5 + + commons-beanutils + commons-beanutils + 1.9.1 + @@ -236,6 +241,13 @@ provided + + + org.apache.jclouds.provider + aws-s3 + true + + javax.jcr @@ -252,7 +264,18 @@ jackrabbit-jcr-commons ${jackrabbit.version} - + + org.apache.jackrabbit + jackrabbit-data + ${jackrabbit.version} + true + + + org.apache.jackrabbit + jackrabbit-aws-ext + ${jackrabbit.version} + true + org.slf4j Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBuilder.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBuilder.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreBuilder.java (working copy) @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.blob; + +import org.apache.jackrabbit.mk.blobs.BlobStore; + +import com.google.common.base.Optional; + +/** + * Interface for building blob stores. 
+ */ +public interface BlobStoreBuilder { + + /** + * Builds the appropriate BlobStore. + * + * @param config + * the config + * @return the blob store wrapped as {@link Optional} to indicate that the + * value might be null + * @throws Exception + * the exception + */ + public Optional build(BlobStoreConfiguration config) throws Exception; +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreConfiguration.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreConfiguration.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreConfiguration.java (working copy) @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.blob;

import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import javax.annotation.Nullable;

import org.osgi.framework.BundleContext;

import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Defines the configuration needed by a BlobStore.
 * <p>
 * Configuration entries are plain string key/value pairs. Defaults are read
 * from the optional classpath resource {@code blobstore.properties}; callers
 * can then layer system properties, an explicit map, or OSGi
 * {@link BundleContext} properties on top.
 */
public class BlobStoreConfiguration {

    /** Key of the primary data store entry used by a MultiDataStore setup. */
    public static final String PRIMARY_DATA_STORE = "primary";

    /** Key of the archive data store entry used by a MultiDataStore setup. */
    public static final String ARCHIVE_DATA_STORE = "archive";

    /** Key naming the DataStore implementation class to instantiate. */
    public static final String PROP_DATA_STORE = "dataStoreProvider";

    /** Key naming the BlobStore implementation class to instantiate. */
    public static final String PROP_BLOB_STORE_PROVIDER = "blobStoreProvider";

    public static final String DEFAULT_BLOB_STORE_PROVIDER = "";

    /** Effective configuration (key -> value). */
    private Map<String, String> configMap;

    /** Keys known from the default property file; used for context lookups. */
    private Set<String> propKeys;

    /**
     * Instantiates a new configuration pre-populated with the non-empty
     * defaults from {@code blobstore.properties} (when present).
     */
    private BlobStoreConfiguration() {
        configMap = Maps.newHashMap();
        propKeys = Sets.newHashSet();

        // Load default props; the resource is optional, so a missing stream
        // (previously an NPE in props.load) or a read error is non-fatal.
        Properties props = new Properties();
        InputStream in = getClass().getResourceAsStream("blobstore.properties");
        if (in != null) {
            try {
                props.load(in);
            } catch (IOException e) {
                // best-effort: fall back to an empty default set
            }
        }

        // Remember all default keys, even those whose value is empty.
        Map<String, String> defaultMap = Maps.fromProperties(props);
        propKeys.addAll(defaultMap.keySet());

        // Only carry non-empty default values into the effective config.
        getConfigMap().putAll(
                Maps.filterValues(defaultMap, new Predicate<String>() {
                    @Override
                    public boolean apply(@Nullable String input) {
                        return (input != null) && (input.trim().length() > 0);
                    }
                }));
    }

    /**
     * Creates a new configuration object with default values.
     *
     * @return the blob store configuration
     */
    public static BlobStoreConfiguration newInstance() {
        return new BlobStoreConfiguration();
    }

    /**
     * Overlays configuration from the JVM system properties, skipping the
     * standard JVM-defined keys to keep the map small.
     *
     * @return this configuration, for chaining
     */
    public BlobStoreConfiguration loadFromSystemProps() {
        getConfigMap().putAll(
                Maps.filterKeys(Maps.fromProperties(System.getProperties()),
                        new Predicate<String>() {
                            @Override
                            public boolean apply(@Nullable String input) {
                                // drop well-known JVM property namespaces
                                return !(input.startsWith("java.")
                                        || input.startsWith("sun.")
                                        || input.startsWith("user.")
                                        || input.startsWith("file.")
                                        || input.startsWith("line.")
                                        || input.startsWith("os.")
                                        || input.startsWith("awt.")
                                        || input.startsWith("path."));
                            }
                        }));
        return this;
    }

    /**
     * Overlays configuration from the given map, then from system properties
     * (system properties win).
     *
     * @param cfgMap
     *            the map to load from; values are expected to be strings
     * @return this configuration, for chaining
     */
    @SuppressWarnings("unchecked")
    public BlobStoreConfiguration loadFromMap(Map<String, ?> cfgMap) {
        getConfigMap().putAll((Map<String, String>) cfgMap);
        loadFromSystemProps();
        return this;
    }

    /**
     * Overlays configuration from the given map and then from the
     * {@link BundleContext} properties for all known keys (context wins).
     *
     * @param map
     *            the map to load from
     * @param context
     *            the OSGi bundle context
     * @return this configuration, for chaining
     */
    public BlobStoreConfiguration loadFromContextOrMap(Map<String, ?> map,
            BundleContext context) {
        loadFromMap(map);

        Map<String, String> contextMap = Maps.newHashMap();
        for (String key : getPropKeys()) {
            if (context.getProperty(key) != null) {
                contextMap.put(key, context.getProperty(key));
            }
        }
        // Apply the collected context values (previously built but discarded).
        getConfigMap().putAll(contextMap);
        return this;
    }

    public String getProperty(String key) {
        return getConfigMap().get(key);
    }

    public void addProperty(String key, String val) {
        getConfigMap().put(key, val);
    }

    public Map<String, String> getConfigMap() {
        return configMap;
    }

    public void setConfigMap(Map<String, String> configMap) {
        this.configMap = configMap;
    }

    public Set<String> getPropKeys() {
        return propKeys;
    }

    public void setPropKeys(Set<String> propKeys) {
        this.propKeys = propKeys;
    }
}
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreHelper.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/BlobStoreHelper.java (working copy) @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.blob; + +import org.apache.jackrabbit.mk.blobs.BlobStore; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStoreBuilder; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStoreBuilder; + +import com.google.common.base.Optional; +import com.google.common.base.Strings; + +/** + * A factory helper for creating BlobStore objects. + */ +public class BlobStoreHelper { + /** + * Creates the appropriate BlobStoreBuilder instance based on the blobType. 
+ * + * @param blobStoreType + * the blob type + * @return the BlobStoreBuilder wrapped as {@link Optional} to indicate that + * the builder returned may be null in the case of a default + * BlobStoreType + * @throws Exception + * the exception + */ + public static Optional createFactory(BlobStoreConfiguration config) + throws Exception { + + BlobStoreBuilder builder = null; + if (!Strings.isNullOrEmpty( + config.getProperty(BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER))) { + String blobStoreProvider = + config.getProperty(BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER); + if (blobStoreProvider.equals(CloudBlobStore.class.getName())) { + builder = CloudBlobStoreBuilder.newInstance(); + } else if (blobStoreProvider.equals(DataStoreBlobStore.class.getName())) { + builder = DataStoreBlobStoreBuilder.newInstance(); + } + } + + return Optional.fromNullable(builder); + } + + /** + * Creates the appropriate BlobStore instance based on the blobType and the + * configuration. + * + * @param blobStoreType + * the blob type + * @param config + * the config + * @return the BlobStoreBuilder wrapped as {@link Optional} to indicate that + * the builder returned may be null in the case of a default + * BlobStoreType or an invalid config + * @throws Exception + * the exception + */ + public static Optional create(BlobStoreConfiguration config) + throws Exception { + BlobStore blobStore = null; + BlobStoreBuilder builder = createFactory(config).orNull(); + + if ((builder != null) && (config != null)) { + blobStore = builder.build(config).orNull(); + } + return Optional.fromNullable(blobStore); + } +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/cloud/CloudBlobStore.java (working copy) @@ -0,0 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.blob.cloud;

import static org.jclouds.blobstore.options.PutOptions.Builder.multipart;

import java.io.IOException;
import java.util.Map;

import org.apache.jackrabbit.mk.blobs.AbstractBlobStore;
import org.apache.jackrabbit.mk.util.StringUtils;
import org.jclouds.ContextBuilder;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.io.Payload;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;

/**
 * Implementation of the BlobStore to store blobs in a cloud blob store
 * accessed through jclouds.
 * <p>
 * Extends {@link AbstractBlobStore} and breaks the binary into chunks for
 * easier management; each chunk is stored as one cloud blob keyed by the hex
 * form of its content digest.
 */
public class CloudBlobStore extends AbstractBlobStore {

    /** Logger instance. */
    private static final Logger LOG = LoggerFactory.getLogger(CloudBlobStore.class);

    /** Cloud store context; created by {@link #init()}. */
    private BlobStoreContext context;

    /** The cloud container (bucket) holding the chunks. */
    private String cloudContainer;

    private String accessKey;

    private String secretKey;

    /** jclouds provider id, e.g. {@code "aws-s3"}. */
    private String cloudProvider;

    protected String getCloudContainer() {
        return cloudContainer;
    }

    public void setCloudContainer(String cloudContainer) {
        this.cloudContainer = cloudContainer;
    }

    public String getAccessKey() {
        return accessKey;
    }

    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretKey() {
        return secretKey;
    }

    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public String getCloudProvider() {
        return cloudProvider;
    }

    public void setCloudProvider(String cloudProvider) {
        this.cloudProvider = cloudProvider;
    }

    /**
     * Instantiates a connection to the cloud blob store using the configured
     * provider/credentials, creating the container if it does not exist.
     * Must be called before any read/write operation.
     *
     * @throws Exception
     *             if the context or container cannot be created
     */
    public void init() throws Exception {
        try {
            this.context =
                    ContextBuilder.newBuilder(cloudProvider)
                            .credentials(accessKey, secretKey)
                            .buildView(BlobStoreContext.class);
            context.getBlobStore().createContainerInLocation(null, cloudContainer);

            LOG.info("Using bucket: " + cloudContainer);
        } catch (Exception e) {
            // message corrected: this class is provider-agnostic, not S3-only
            LOG.error("Error creating CloudBlobStore : ", e);
            throw e;
        }
    }

    /**
     * Uploads the block to the cloud service, skipping blocks that already
     * exist (content-addressed, so identical digest implies identical data).
     */
    @Override
    protected void storeBlock(byte[] digest, int level, byte[] data) throws IOException {
        Preconditions.checkNotNull(context);

        String id = StringUtils.convertBytesToHex(digest);

        org.jclouds.blobstore.BlobStore blobStore = context.getBlobStore();

        if (!blobStore.blobExists(cloudContainer, id)) {
            Map<String, String> metadata = Maps.newHashMap();
            metadata.put("level", String.valueOf(level));

            Blob blob = blobStore.blobBuilder(id)
                    .payload(data)
                    .userMetadata(metadata)
                    .build();
            // multipart() lets providers split large payloads transparently
            String etag = blobStore.putBlob(cloudContainer, blob, multipart());
            LOG.debug("Blob " + id + " created with cloud tag : " + etag);
        } else {
            LOG.debug("Blob " + id + " already exists");
        }
    }

    /**
     * Reads the data from the actual cloud service, returning the block bytes
     * starting at the requested position.
     */
    @Override
    protected byte[] readBlockFromBackend(BlockId blockId) throws Exception {
        Preconditions.checkNotNull(context);

        String id = StringUtils.convertBytesToHex(blockId.getDigest());

        Blob cloudBlob = context.getBlobStore().getBlob(cloudContainer, id);
        if (cloudBlob == null) {
            String message = "Did not find block " + id;
            LOG.error(message);
            throw new IOException(message);
        }

        Payload payload = cloudBlob.getPayload();
        try {
            byte[] data = ByteStreams.toByteArray(payload.getInput());

            if (blockId.getPos() == 0) {
                return data;
            }

            // return only the tail starting at pos; empty when out of range
            int len = (int) (data.length - blockId.getPos());
            if (len < 0) {
                return new byte[0];
            }
            byte[] d2 = new byte[len];
            System.arraycopy(data, (int) blockId.getPos(), d2, 0, len);
            return d2;
        } finally {
            payload.close();
        }
    }

    /**
     * Deletes the cloud container and all its contents, then closes the
     * context. Intended for test cleanup.
     */
    public void deleteBucket() {
        Preconditions.checkNotNull(context);

        if (context.getBlobStore().containerExists(cloudContainer)) {
            context.getBlobStore().deleteContainer(cloudContainer);
        }
        context.close();
    }

    @Override
    public void startMark() throws IOException {
        // No-op: mark/sweep garbage collection is not supported here
    }

    @Override
    protected void mark(BlockId id) throws Exception {
        // No-op
    }

    @Override
    public int sweep() throws IOException {
        return 0;
    }

    @Override
    protected boolean isMarkEnabled() {
        return false;
    }
}
+ */ +package org.apache.jackrabbit.oak.plugins.blob.cloud; + +import org.apache.commons.beanutils.BeanUtils; +import org.apache.jackrabbit.mk.blobs.BlobStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBuilder; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; + +import com.google.common.base.Optional; + +/** + * A factory helper for creating CloudBlobStore instance. + */ +public class CloudBlobStoreBuilder implements BlobStoreBuilder { + + private static final CloudBlobStoreBuilder INSTANCE = new CloudBlobStoreBuilder(); + + public static CloudBlobStoreBuilder newInstance() { + return INSTANCE; + } + + /** + * Creates the {@link CloudBlobStore} instance. + * + * @param configuration + * the configuration + * @return the blob store wrapped as {@link Optional} to indicate that the + * value might be null when a valid configuration object not + * available + * @throws Exception + * the exception + */ + @Override + public Optional build( + BlobStoreConfiguration configuration) + throws Exception { + BlobStore blobStore = null; + + blobStore = new CloudBlobStore(); + BeanUtils.populate(blobStore, configuration.getConfigMap()); + ((CloudBlobStore) blobStore).init(); + + return Optional.of(blobStore); + } +} \ No newline at end of file Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreBlobStore.java (working copy) @@ -0,0 +1,511 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.blob.datastore;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.concurrent.ExecutionException;

import org.apache.jackrabbit.core.data.CachingDataStore;
import org.apache.jackrabbit.core.data.DataIdentifier;
import org.apache.jackrabbit.core.data.DataRecord;
import org.apache.jackrabbit.core.data.DataStore;
import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.mk.blobs.GarbageCollectableBlobStore;
import org.apache.jackrabbit.mk.util.Cache;
import org.apache.jackrabbit.mk.util.IOUtils;
import org.apache.jackrabbit.mk.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Strings;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;

/**
 * A BlobStore implementation which is a compatibility wrapper for a
 * Jackrabbit {@link DataStore}.
 * <p>
 * Uses a two-level cache to improve random read performance:
 * <ul>
 * <li>open {@link InputStream}s are cached until fully read or evicted; the
 * number of cached streams is bounded by the {@code streamCacheSize}
 * property;</li>
 * <li>decoded blocks are kept in a 16 MB byte[] cache.</li>
 * </ul>
 */
public class DataStoreBlobStore implements GarbageCollectableBlobStore,
        Cache.Backend<DataStoreBlobStore.LogicalBlockId, DataStoreBlobStore.Data> {

    /** Logger instance. */
    private static final Logger LOG = LoggerFactory.getLogger(DataStoreBlobStore.class);

    /** Minimum block size; must exceed the size of a content hash. */
    protected static final int BLOCK_SIZE_LIMIT = 40;

    private static final int DEFAULT_STREAM_CACHE_SIZE = 256;

    /**
     * The size of a block. 128 KB has been found to be as fast as larger
     * values, and faster than smaller values. 2 MB results in fewer files.
     */
    private int blockSize = 2 * 1024 * 1024;

    /**
     * The block cache (16 MB). Caches blocks up to blockSize.
     */
    private Cache<LogicalBlockId, Data> blockCache =
            Cache.newInstance(this, 16 * 1024 * 1024);

    /** The stream cache size; settable via bean property before init. */
    protected int streamCacheSize;

    /**
     * Caches open streams to avoid re-opening the backend record on every
     * random-access read. Evicted streams are closed.
     */
    private LoadingCache<String, InputStream> streamCache;

    /** Caches blob lengths to avoid repeated backend metadata lookups. */
    private LoadingCache<String, Long> fileLengthCache;

    /** The wrapped Jackrabbit data store. */
    private DataStore dataStore;

    /**
     * Gets the stream cache size.
     *
     * @return the stream cache size
     */
    protected int getStreamCacheSize() {
        return streamCacheSize;
    }

    /**
     * Sets the stream cache size.
     *
     * @param streamCacheSize
     *            the new stream cache size
     */
    protected void setStreamCacheSize(int streamCacheSize) {
        this.streamCacheSize = streamCacheSize;
    }

    /**
     * Sets the block size.
     *
     * @param x
     *            the new block size in bytes
     */
    public final void setBlockSize(final int x) {
        validateBlockSize(x);
        this.blockSize = x;
    }

    /**
     * Validates that a block can hold at least one content hash.
     *
     * @param x
     *            the candidate block size
     */
    private static void validateBlockSize(final int x) {
        if (x < BLOCK_SIZE_LIMIT) {
            throw new IllegalArgumentException("The minimum size must be bigger "
                    + "than a content hash itself; limit = " + BLOCK_SIZE_LIMIT);
        }
    }

    /**
     * Initializes the blob store with the backing data store and builds the
     * stream and file-length caches. Must be called before use.
     *
     * @param dataStore
     *            the data store to wrap
     */
    public void init(DataStore dataStore) {
        if (streamCacheSize <= 0) {
            streamCacheSize = DEFAULT_STREAM_CACHE_SIZE;
        }

        streamCache = CacheBuilder.newBuilder()
                .maximumSize(streamCacheSize)
                .removalListener(new RemovalListener<String, InputStream>() {
                    @Override
                    public void onRemoval(
                            RemovalNotification<String, InputStream> removal) {
                        // close evicted streams to avoid leaking descriptors
                        IOUtils.closeQuietly(removal.getValue());
                    }
                })
                .build(new CacheLoader<String, InputStream>() {
                    @Override
                    public InputStream load(String key) throws Exception {
                        return loadStream(key);
                    }
                });
        fileLengthCache = CacheBuilder.newBuilder()
                .maximumSize(streamCacheSize)
                .build(new CacheLoader<String, Long>() {
                    @Override
                    public Long load(String key) throws Exception {
                        return getBlobLength(key);
                    }
                });
        this.dataStore = dataStore;
    }

    /**
     * Writes the input stream to the data store and returns the record id.
     * The stream is always closed.
     */
    @Override
    public String writeBlob(InputStream in) throws IOException {
        try {
            DataRecord dataRec = dataStore.addRecord(in);
            return dataRec.getIdentifier().toString();
        } catch (DataStoreException e) {
            throw new IOException(e);
        } finally {
            IOUtils.closeQuietly(in);
        }
    }

    /**
     * Reads up to {@code length} bytes of the blob starting at {@code pos}
     * into {@code buff} at {@code off}.
     *
     * @return the number of bytes read, or -1 when the id is empty or the
     *         position is past the end of the blob
     */
    @Override
    public int readBlob(String blobId, long pos, byte[] buff, int off, int length)
            throws IOException {
        if (Strings.isNullOrEmpty(blobId)) {
            return -1;
        }

        long blobLength;
        try {
            blobLength = fileLengthCache.get(blobId);
        } catch (ExecutionException e) {
            // fall back to a direct lookup when the cache load fails
            LOG.debug("File length cache error", e);
            blobLength = getBlobLength(blobId);
        }
        LOG.debug("read {" + blobId + "}, {" + blobLength + "}");

        if (pos >= blobLength) {
            LOG.trace("Blob read for pos " + pos + "," + (pos + length - 1)
                    + " out of range");
            return -1;
        }

        long position = pos;
        int offset = off;
        int totalLength = 0;
        long bytesLeft =
                (position + length) > blobLength ? blobLength - position : length;

        // Read all the logical blocks covering the requested range.
        while (bytesLeft > 0) {
            long posBlockStart = position / blockSize;
            int posOffsetInBlock = (int) (position - posBlockStart * blockSize);

            byte[] block = readBlock(blobId, posBlockStart);

            long bytesToRead = Math.min(bytesLeft,
                    Math.min((blobLength - posOffsetInBlock),
                            (blockSize - posOffsetInBlock)));
            System.arraycopy(block, posOffsetInBlock, buff, offset, (int) bytesToRead);

            position += bytesToRead;
            offset += bytesToRead;
            totalLength += bytesToRead;
            bytesLeft -= bytesToRead;
        }
        return totalLength;
    }

    /**
     * Gets the data store.
     *
     * @return the data store
     */
    public DataStore getDataStore() {
        return dataStore;
    }

    /**
     * Sets the data store.
     *
     * @param dataStore
     *            the data store
     */
    protected void setDataStore(DataStore dataStore) {
        this.dataStore = dataStore;
    }

    /**
     * Loads a block into the block cache (called on cache miss).
     */
    @Override
    public final Data load(final LogicalBlockId id) {
        byte[] data;
        try {
            data = readBlockFromBackend(id);
        } catch (Exception e) {
            throw new RuntimeException("failed to read block from backend, id " + id, e);
        }
        if (data == null) {
            throw new IllegalArgumentException("The block with id " + id + " was not found");
        }
        LOG.debug("Read from backend (Cache Miss): " + id);
        return new Data(data);
    }

    /**
     * Gets the length of the blob identified by the blobId; 0 for an empty id.
     */
    @Override
    public final long getBlobLength(final String blobId) throws IOException {
        if (Strings.isNullOrEmpty(blobId)) {
            return 0;
        }

        try {
            // CachingDataStore keeps lengths locally; avoid a backend roundtrip
            if (dataStore instanceof CachingDataStore) {
                return ((CachingDataStore) dataStore)
                        .getLength(new DataIdentifier(blobId));
            }
            return dataStore.getRecord(new DataIdentifier(blobId)).getLength();
        } catch (DataStoreException e) {
            throw new IOException("Could not get length of blob for id " + blobId, e);
        }
    }

    /**
     * Reads one block's worth of bytes from the (possibly cached) backend
     * stream for the given logical block id.
     *
     * @param id
     *            the logical block id
     * @return the block bytes (padded with zeros past end of stream)
     * @throws IOException
     *             if the stream cannot be obtained or read
     */
    private byte[] readBlockFromBackend(final LogicalBlockId id) throws IOException {
        String key = StringUtils.convertBytesToHex(id.digest);
        InputStream stream;
        try {
            stream = streamCache.get(key);
        } catch (ExecutionException e) {
            // fall back to a fresh stream; previously stream stayed null
            // here and the read below threw a NullPointerException
            LOG.debug("Error retrieving from stream cache : " + key, e);
            stream = loadStream(key);
        }

        byte[] block = new byte[blockSize];
        org.apache.commons.io.IOUtils.read(stream, block, 0, blockSize);

        // a fully-consumed stream cannot serve further reads; drop it
        if (stream.available() <= 0) {
            streamCache.invalidate(key);
        }
        return block;
    }

    /**
     * Opens the record stream from the data store.
     *
     * @param key
     *            the record identifier (hex digest)
     * @return the input stream
     * @throws IOException
     *             if the record cannot be read
     */
    private InputStream loadStream(String key) throws IOException {
        try {
            return dataStore.getRecord(new DataIdentifier(key)).getStream();
        } catch (DataStoreException e) {
            throw new IOException("Could not read blob for id " + key, e);
        }
    }

    /**
     * Reads a block via the block cache.
     *
     * @param blobId
     *            the blob id (hex digest)
     * @param posStart
     *            the logical block index
     * @return the block bytes
     */
    private byte[] readBlock(final String blobId, final long posStart) throws IOException {
        byte[] digest = StringUtils.convertHexToBytes(blobId);
        LogicalBlockId id = new LogicalBlockId(digest, posStart);

        LOG.debug("Trying to read from cache : " + blobId + ", " + posStart);

        return blockCache.get(id).data;
    }

    /**
     * Deletes all blobs older than the given timestamp.
     *
     * @param time
     *            the cutoff timestamp
     * @return the number of deleted records
     * @throws Exception
     *             if deletion fails
     */
    public int deleteAllOlderThan(long time) throws Exception {
        return dataStore.deleteAllOlderThan(time);
    }

    /**
     * A file is divided into logical chunks. Blocks are small enough to fit in
     * memory, so they can be cached.
     */
    public static class LogicalBlockId {

        /** The content digest of the blob. */
        final byte[] digest;

        /** The starting position (block index) within the blob. */
        final long pos;

        /**
         * Instantiates a new logical block id.
         *
         * @param digest
         *            the digest
         * @param pos
         *            the starting position of the block
         */
        LogicalBlockId(final byte[] digest, final long pos) {
            this.digest = digest;
            this.pos = pos;
        }

        @Override
        public final boolean equals(final Object other) {
            if (this == other) {
                return true;
            }
            // instanceof already rejects null
            if (!(other instanceof LogicalBlockId)) {
                return false;
            }
            LogicalBlockId o = (LogicalBlockId) other;
            return Arrays.equals(digest, o.digest) && pos == o.pos;
        }

        @Override
        public final int hashCode() {
            return Arrays.hashCode(digest) ^ (int) (pos >> 32) ^ (int) pos;
        }

        @Override
        public final String toString() {
            return StringUtils.convertBytesToHex(digest) + "@" + pos;
        }

        /**
         * Gets the digest.
         *
         * @return the digest
         */
        public final byte[] getDigest() {
            return digest;
        }

        /**
         * Gets the starting position.
         *
         * @return the starting position
         */
        public final long getPos() {
            return pos;
        }
    }

    /**
     * The data for a block, as stored in the block cache.
     */
    public static class Data implements Cache.Value {

        /** The block bytes. */
        final byte[] data;

        /**
         * Instantiates a new data holder.
         *
         * @param data
         *            the block bytes
         */
        Data(final byte[] data) {
            this.data = data;
        }

        @Override
        public final String toString() {
            String s = StringUtils.convertBytesToHex(data);
            return s.length() > 100
                    ? s.substring(0, 100) + ".. (len=" + data.length + ")"
                    : s;
        }

        @Override
        public final int getMemory() {
            return data.length;
        }
    }

    /**
     * Writes the file at the given path to the data store and deletes the
     * temporary file afterwards.
     */
    @Override
    public String writeBlob(String tempFileName) throws IOException {
        File file = new File(tempFileName);
        InputStream in = null;
        try {
            in = new FileInputStream(file);
            return writeBlob(in);
        } finally {
            if (in != null) {
                in.close();
            }
            file.delete();
        }
    }

    @Override
    public int sweep() throws IOException {
        // no-op: garbage collection is delegated to the data store
        return 0;
    }

    @Override
    public void startMark() throws IOException {
        // no-op
    }

    @Override
    public void clearInUse() {
        dataStore.clearInUse();
    }

    @Override
    public void clearCache() {
        // no-op
    }

    @Override
    public long getBlockSizeMin() {
        // no-op
        return 0;
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.blob.datastore; + +import javax.jcr.RepositoryException; + +import org.apache.commons.beanutils.BeanUtils; +import org.apache.jackrabbit.core.data.Backend; +import org.apache.jackrabbit.core.data.CachingDataStore; +import org.apache.jackrabbit.core.data.DataStore; +import org.apache.jackrabbit.core.data.FileDataStore; +import org.apache.jackrabbit.core.data.MultiDataStore; +import org.apache.jackrabbit.core.data.db.DbDataStore; +import org.apache.jackrabbit.core.util.db.ConnectionFactory; +import org.apache.jackrabbit.mk.blobs.BlobStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBuilder; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; + +import com.google.common.base.Optional; + +/** + * Helper class to create {@link DataStoreBlobStore} instance and inject the + * appropriate Jackrabbit {@link DataStore} instance based on the configuration. + */ +public class DataStoreBlobStoreBuilder implements BlobStoreBuilder { + + private static final DataStoreBlobStoreBuilder INSTANCE = new DataStoreBlobStoreBuilder(); + + public static DataStoreBlobStoreBuilder newInstance() { + return INSTANCE; + } + + /** + * Creates the wrapper {@link BlobStore} instance for Jackrabbit + * {@link DataStore}. 
+ * + * @param configuration + * the configuration + * @return the DataStoreBlobStore wrapped as {@link Optional} indicating that the + * value can be null when a valid configuration is not available + * @throws Exception + * the exception + */ + @Override + public Optional build(BlobStoreConfiguration configuration) throws Exception { + BlobStore blobStore = null; + + DataStore store = getDataStore(configuration); + if (store != null) { + blobStore = new DataStoreBlobStore(); + BeanUtils.populate(blobStore, configuration.getConfigMap()); + ((DataStoreBlobStore) blobStore).init(store); + } + return Optional.fromNullable(blobStore); + } + + /** + * Gets the data store based on the DataStoreProvider. + * + * @param config + * the blob store configuration + * (the data store type is + * read from the configuration) + * @return the data store + * @throws Exception + * the exception + */ + private DataStore getDataStore(BlobStoreConfiguration config) throws Exception { + return getDataStore( + (String) config.getProperty(BlobStoreConfiguration.PROP_DATA_STORE), config); + } + + private DataStore getDataStore(String dataStoreType, BlobStoreConfiguration config) throws Exception { + DataStore dataStore = (DataStore) Class.forName(dataStoreType).newInstance(); + BeanUtils.populate(dataStore, config.getConfigMap()); + + if (dataStore instanceof DbDataStore) { + ((DbDataStore) dataStore) + .setConnectionFactory(new ConnectionFactory()); + } + + if (dataStore instanceof MultiDataStore) { + DataStore primary = + getDataStore( + (String) config.getProperty(BlobStoreConfiguration.PRIMARY_DATA_STORE), config); + DataStore archive = + getDataStore( + (String) config.getProperty(BlobStoreConfiguration.ARCHIVE_DATA_STORE), config); + ((MultiDataStore) dataStore) + .setPrimaryDataStore(primary); + ((MultiDataStore) dataStore) + .setArchiveDataStore(archive); + dataStore.init(null); + } else if (!(dataStore instanceof FileDataStore) + && !(dataStore instanceof CachingDataStore)) 
{ + dataStore.init(null); + return wrapInCachingDataStore(dataStore, config); + } + else { + dataStore.init(null); + } + + return dataStore; + } + + private DataStore wrapInCachingDataStore(final DataStore dataStore, BlobStoreConfiguration config) throws Exception { + CachingDataStore cachingStore = new CachingDataStore() { + @Override + protected Backend createBackend() { + return new DataStoreWrapperBackend(dataStore); + } + + @Override + protected String getMarkerFile() { + return "db.init.done"; + } + }; + + BeanUtils.populate(cachingStore, config.getConfigMap()); + cachingStore.init(null); + + return cachingStore; + } +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java (revision 0) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/DataStoreWrapperBackend.java (working copy) @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.blob.datastore; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Iterator; +import java.util.List; + +import org.apache.jackrabbit.core.data.Backend; +import org.apache.jackrabbit.core.data.CachingDataStore; +import org.apache.jackrabbit.core.data.DataIdentifier; +import org.apache.jackrabbit.core.data.DataStore; +import org.apache.jackrabbit.core.data.DataStoreException; +import org.apache.jackrabbit.core.data.MultiDataStoreAware; +import org.apache.tika.io.IOUtils; + +import com.google.common.collect.Lists; + +/** + * {@link Backend} wrapper over Jackrabbit {@link DataStore} which enables using + * a {@link CachingDataStore} for local file caching. + */ +public class DataStoreWrapperBackend implements Backend { + + /** The data store being wrapped. */ + private DataStore dataStore; + + /** + * Instantiates a new data store wrapper backend. + * + * @param dataStore + * the data store + */ + public DataStoreWrapperBackend(DataStore dataStore) { + this.dataStore = dataStore; + } + + @Override + public void init(CachingDataStore store, String homeDir, String config) throws DataStoreException { + } + + @Override + public InputStream read(DataIdentifier identifier) throws DataStoreException { + return dataStore.getRecordIfStored(identifier).getStream(); + } + + @Override + public long getLength(DataIdentifier identifier) throws DataStoreException { + return dataStore.getRecord(identifier).getLength(); + } + + @Override + public long getLastModified(DataIdentifier identifier) throws DataStoreException { + return dataStore.getRecord(identifier).getLastModified(); + } + + @Override + public void write(DataIdentifier identifier, File file) throws DataStoreException { + InputStream stream = null; + try { + stream = new FileInputStream(file); + dataStore.addRecord(stream); + } catch (IOException io) { + throw new DataStoreException("Error 
retrieving stream from : " + file.getAbsolutePath(), io); + } finally { + IOUtils.closeQuietly(stream); + } + } + + @Override + public Iterator getAllIdentifiers() throws DataStoreException { + return dataStore.getAllIdentifiers(); + } + + @Override + public void touch(DataIdentifier identifier, long minModifiedDate) throws DataStoreException { + // currently no-op + } + + @Override + public boolean exists(DataIdentifier identifier) throws DataStoreException { + return (dataStore.getRecordIfStored(identifier) != null); + } + + @Override + public void close() throws DataStoreException { + dataStore.close(); + } + + @Override + public List deleteAllOlderThan(long timestamp) throws DataStoreException { + dataStore.deleteAllOlderThan(timestamp); + return Lists.newArrayList(); + } + + @Override + public void deleteRecord(DataIdentifier identifier) throws DataStoreException { + if (dataStore instanceof MultiDataStoreAware) { + ((MultiDataStoreAware) dataStore).deleteRecord(identifier); + } + } +} Index: oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java =================================================================== --- oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java (revision 1566507) +++ oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java (working copy) @@ -18,24 +18,25 @@ */ package org.apache.jackrabbit.oak.plugins.document; +import static org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils.registerMBean; + import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; -import com.mongodb.DB; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; -import com.mongodb.MongoClientURI; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.ConfigurationPolicy; import 
org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; +import org.apache.jackrabbit.mk.blobs.BlobStore; import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean; import org.apache.jackrabbit.oak.kernel.KernelNodeStore; import org.apache.jackrabbit.oak.osgi.ObserverTracker; import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper; import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; import org.apache.jackrabbit.oak.spi.state.NodeStore; @@ -47,7 +48,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils.registerMBean; +import com.google.common.base.Strings; +import com.mongodb.DB; +import com.mongodb.MongoClient; +import com.mongodb.MongoClientOptions; +import com.mongodb.MongoClientURI; /** * The OSGi service to start/stop a DocumentNodeStore instance. @@ -112,6 +117,8 @@ int offHeapCache = PropertiesUtil.toInteger(prop(config, PROP_OFF_HEAP_CACHE), DEFAULT_OFF_HEAP_CACHE); int cacheSize = PropertiesUtil.toInteger(prop(config, PROP_CACHE), DEFAULT_CACHE); boolean useMK = PropertiesUtil.toBoolean(config.get(PROP_USE_MK), false); + String blobStoreType = PropertiesUtil.toString(config.get(BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER), + BlobStoreConfiguration.DEFAULT_BLOB_STORE_PROVIDER); MongoClientOptions.Builder builder = MongoConnection.getDefaultBuilder(); MongoClientURI mongoURI = new MongoClientURI(uri,builder); @@ -128,11 +135,31 @@ MongoClient client = new MongoClient(mongoURI); DB mongoDB = client.getDB(db); + // Check if any valid external BlobStore is defined. 
+ // If not then use the default which is MongoBlobStore + BlobStore blobStore = null; + if (!Strings.isNullOrEmpty(blobStoreType)) { + blobStore = BlobStoreHelper.create( + BlobStoreConfiguration.newInstance(). + loadFromContextOrMap(config, context)) + .orNull(); + } + + if (blobStore == null) { mk = new DocumentMK.Builder() .memoryCacheSize(cacheSize * MB) .offHeapCacheSize(offHeapCache * MB) .setMongoDB(mongoDB) .open(); + } + else { + mk = new DocumentMK.Builder() + .memoryCacheSize(cacheSize * MB) + .offHeapCacheSize(offHeapCache * MB) + .setMongoDB(mongoDB) + .setBlobStore(blobStore) + .open(); + } logger.info("Connected to database {}", mongoDB); Index: oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties =================================================================== --- oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties (revision 0) +++ oak-core/src/main/resources/org/apache/jackrabbit/oak/plugins/blob/blobstore.properties (working copy) @@ -0,0 +1,42 @@ +# Identify the blob store provider if different from the default +blobStoreProvider= + +# Properties for CloudBlobStore +accessKey= +secretKey= +cloudContainer=oakblobstore +cloudProvider=aws-s3 + +# Common for all data store +dataStoreProvider=org.apache.jackrabbit.core.data.FileDataStore +streamCacheSize=256 +path=./repository/datastore + +# Caching data store properties +cacheSize=68719476736 +secret=123456789 +cachePurgeTrigFactor=0.85d +cachePurgeResizeFactor=0.95d +minRecordLength=16384 +config= + +# DbDataStore properties +url=jdbc:postgresql:test +user=sa +password=sa +databaseType=postgresql +driver=org.postgresql.Driver +copyWhenReading=true +tablePrefix= +schemaObjectPrefix= +schemaCheckEnabled=true + +# MultiDataStore properties +maxAge=60 +moveDataTaskSleep=604800 +moveDataTaskFirstRunHourOfDay=1 +sleepBetweenRecords=100 +delayedDelete=false +delayedDeleteSleep=86400
+primary=org.apache.jackrabbit.core.data.db.DbDataStore +archive=org.apache.jackrabbit.core.data.FileDataStore \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/CloudStoreUtils.java (working copy) @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.blob.cloud; + +import org.apache.jackrabbit.mk.blobs.AbstractBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; + +/** + * Helper class for retrieving the appropriate blobStore instance + */ +public class CloudStoreUtils { + + /** + * Gets the blob store. 
+ * + * @return the blob store + * @throws Exception + * the exception + */ + protected static AbstractBlobStore getBlobStore() throws Exception { + BlobStoreConfiguration config = + BlobStoreConfiguration.newInstance().loadFromSystemProps(); + config.addProperty( + BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER, CloudBlobStore.class.getName()); + return (AbstractBlobStore) BlobStoreHelper.create(config).orNull(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudGetLengthTest.java (working copy) @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.blob.cloud; + +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKGetLengthTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#getLength(String)} with {@link CloudBlobStore} + */ +public class DocumentMKCloudGetLengthTest extends DocumentMKGetLengthTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(CloudStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(CloudStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + ((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket(); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudReadTest.java (working copy) @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.blob.cloud; + +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKReadTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#read(String, long, byte[], int, int)} with + * {@link CloudBlobStore} + */ +public class DocumentMKCloudReadTest extends DocumentMKReadTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(CloudStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(CloudStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + 
((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket(); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/DocumentMKCloudWriteTest.java (working copy) @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.blob.cloud; + +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKWriteTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#write(java.io.InputStream)} with + * {@link CloudBlobStore} + */ +public class DocumentMKCloudWriteTest extends DocumentMKWriteTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(CloudStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(CloudStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + ((CloudBlobStore) mk.getNodeStore().getBlobStore()).deleteBucket(); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/cloud/MongoCloudBlobStoreTest.java (working copy) @@ -0,0 +1,57 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.blob.cloud; + +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.document.blob.AbstractBlobStoreTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests the {@link CloudBlobStore} implementation. 
+ */ +public class MongoCloudBlobStoreTest extends AbstractBlobStoreTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(CloudStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Before + @Override + public void setUp() throws Exception { + store = CloudStoreUtils.getBlobStore(); + } + + @Override + @After + public void tearDown() throws Exception { + ((CloudBlobStore) store).deleteBucket(); + super.tearDown(); + } + + @Override + @Test + public void testGarbageCollection() throws Exception { + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DataStoreUtils.java (working copy) @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.blob.ds; + +import org.apache.jackrabbit.core.data.DataStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; +import org.apache.jackrabbit.oak.plugins.document.AbstractMongoConnectionTest; + +/** + * Helper for retrieving the {@link DataStoreBlobStore} instantiated with the + * appropriate {@link DataStore}. + */ +public class DataStoreUtils extends AbstractMongoConnectionTest { + public final static String PATH = "./target/repository/"; + + /** + * Gets the blob store. + * + * @return the blob store + * @throws Exception + * the exception + */ + protected static DataStoreBlobStore getBlobStore() throws Exception { + BlobStoreConfiguration config = + BlobStoreConfiguration.newInstance().loadFromSystemProps(); + config.addProperty( + BlobStoreConfiguration.PROP_BLOB_STORE_PROVIDER, DataStoreBlobStore.class.getName()); + config.addProperty("path", PATH + "datastore"); + return (DataStoreBlobStore) BlobStoreHelper.create(config).orNull(); + } +} Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreGetLengthTest.java (working copy) @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.blob.ds; + +import java.io.File; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKGetLengthTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#getLength(String)} using {@link DataStore} + */ +public class DocumentMKDataStoreGetLengthTest extends DocumentMKGetLengthTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(DataStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(DataStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + FileUtils.deleteDirectory(new File(DataStoreUtils.PATH)); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + 
MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreReadTest.java (working copy) @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.blob.ds; + +import java.io.File; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKReadTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#read(String, long, byte[], int, int)} using + * {@link DataStore} + */ +public class DocumentMKDataStoreReadTest extends DocumentMKReadTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(DataStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(DataStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + FileUtils.deleteDirectory(new File(DataStoreUtils.PATH)); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/DocumentMKDataStoreWriteTest.java (working copy) @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.jackrabbit.oak.plugins.document.blob.ds; + +import java.io.File; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.MongoUtils; +import org.apache.jackrabbit.oak.plugins.document.blob.DocumentMKWriteTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * Tests for {@code DocumentMK#write(java.io.InputStream)} using + * {@link DataStore} + */ +public class DocumentMKDataStoreWriteTest extends DocumentMKWriteTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(DataStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Override + @Before + public void setUpConnection() throws Exception { + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mk = new DocumentMK.Builder().setMongoDB(mongoConnection.getDB()) + .setBlobStore(DataStoreUtils.getBlobStore()).open(); + } + + @Override + @After + public void tearDownConnection() throws Exception { + 
FileUtils.deleteDirectory(new File(DataStoreUtils.PATH)); + mk.dispose(); + // the db might already be closed + mongoConnection.close(); + mongoConnection = MongoUtils.getConnection(); + MongoUtils.dropCollections(mongoConnection.getDB()); + mongoConnection.close(); + } +} \ No newline at end of file Index: oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java =================================================================== --- oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java (revision 0) +++ oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/blob/ds/MongoDataStoreBlobStoreTest.java (working copy) @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.document.blob.ds; + +import java.io.File; + +import org.apache.commons.io.FileUtils; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; +import org.apache.jackrabbit.oak.plugins.document.blob.AbstractBlobStoreTest; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests the {@link DataStoreBlobStore} implementation. + */ +public class MongoDataStoreBlobStoreTest extends AbstractBlobStoreTest { + @BeforeClass + public static void setUpBeforeClass() throws Exception { + try { + Assume.assumeNotNull(DataStoreUtils.getBlobStore()); + } catch (Exception e) { + Assume.assumeNoException(e); + } + } + + @Before + @Override + public void setUp() throws Exception { + store = DataStoreUtils.getBlobStore(); + } + + @Override + @Test + public void testCombinedIdentifier() throws Exception { + } + + @Override + @Test + public void testGarbageCollection() throws Exception { + } + + @After + @Override + public void tearDown() throws Exception { + FileUtils.deleteDirectory(new File(DataStoreUtils.PATH)); + super.tearDown(); + } +} \ No newline at end of file Index: oak-core/src/test/resources/aws.properties =================================================================== --- oak-core/src/test/resources/aws.properties (revision 0) +++ oak-core/src/test/resources/aws.properties (working copy) @@ -0,0 +1,38 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# AWS account ID +accessKey= +# AWS secret key +secretKey= +# AWS bucket name +s3Bucket=oakblobstore +# AWS bucket region +# Mapping of S3 regions to their constants +# US Standard us-standard +# US West us-west-2 +# US West (Northern California) us-west-1 +# EU (Ireland) EU +# Asia Pacific (Singapore) ap-southeast-1 +# Asia Pacific (Sydney) ap-southeast-2 +# Asia Pacific (Tokyo) ap-northeast-1 +# South America (Sao Paulo) sa-east-1 +s3Region=us-standard +connectionTimeout=120000 +socketTimeout=120000 +maxConnections=10 +maxErrorRetry=10 Index: oak-it/mk/pom.xml =================================================================== --- oak-it/mk/pom.xml (revision 1566507) +++ oak-it/mk/pom.xml (working copy) @@ -108,6 +108,18 @@ com.google.guava guava + + org.apache.jackrabbit + jackrabbit-data + ${jackrabbit.version} + test + + + org.apache.jackrabbit + jackrabbit-aws-ext + ${jackrabbit.version} + test + Index: oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/BaseMongoMicroKernelFixture.java =================================================================== --- oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/BaseMongoMicroKernelFixture.java (revision 1566507) +++ oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/BaseMongoMicroKernelFixture.java (working copy) @@ -39,7 +39,7 @@ private MongoConnection mongoConnection = null; - private MongoConnection getMongoConnection() throws Exception { + protected MongoConnection getMongoConnection() throws Exception { if (mongoConnection == null) { mongoConnection = new MongoConnection(HOST, PORT, DB); 
} @@ -93,7 +93,7 @@ protected abstract BlobStore getBlobStore(DB db); - private static void dropCollections(DB db) { + protected void dropCollections(DB db) { db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop(); db.getCollection(Collection.NODES.toString()).drop(); } Index: oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoCloudBlobMicroKernelFixture.java =================================================================== --- oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoCloudBlobMicroKernelFixture.java (revision 0) +++ oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoCloudBlobMicroKernelFixture.java (working copy) @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mk.test; + +import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mk.blobs.BlobStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStoreBuilder; +import org.apache.jackrabbit.oak.plugins.document.Collection; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobStore; +import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; + +import com.mongodb.DB; + +/** + * The Class MongoCloudBlobMicroKernelFixture. + */ +public class MongoCloudBlobMicroKernelFixture extends BaseMongoMicroKernelFixture { + + /** The blob store. */ + private BlobStore blobStore; + + /** + * Open connection. + * + * @throws Exception + */ + protected void openConnection() throws Exception { + if (blobStore == null) { + blobStore = + CloudBlobStoreBuilder + .newInstance() + .build( + BlobStoreConfiguration.newInstance().loadFromSystemProps()).get(); + } + } + + @Override + protected BlobStore getBlobStore(com.mongodb.DB db) { + return blobStore; + } + + @Override + public void setUpCluster(MicroKernel[] cluster) throws Exception { + MongoConnection connection = getMongoConnection(); + openConnection(); + DB db = connection.getDB(); + dropCollections(db); + + for (int i = 0; i < cluster.length; i++) { + cluster[i] = new DocumentMK.Builder(). 
+ setMongoDB(db).setBlobStore(blobStore).setClusterId(i).open(); + } + } + + @Override + protected void dropCollections(DB db) { + db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop(); + db.getCollection(Collection.NODES.toString()).drop(); + ((CloudBlobStore) blobStore).deleteBucket(); + } +} Index: oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoDataStoreBlobMicroKernelFixture.java =================================================================== --- oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoDataStoreBlobMicroKernelFixture.java (revision 0) +++ oak-it/mk/src/test/java/org/apache/jackrabbit/mk/test/MongoDataStoreBlobMicroKernelFixture.java (working copy) @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.mk.test; + +import org.apache.jackrabbit.core.data.DataStoreException; +import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mk.blobs.BlobStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStoreBuilder; +import org.apache.jackrabbit.oak.plugins.document.Collection; +import org.apache.jackrabbit.oak.plugins.document.DocumentMK; +import org.apache.jackrabbit.oak.plugins.document.mongo.MongoBlobStore; +import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; + +import com.mongodb.DB; + +/** + * The Class MongoDataStoreBlobMicroKernelFixture. + */ +public class MongoDataStoreBlobMicroKernelFixture extends BaseMongoMicroKernelFixture { + + /** The blob store. */ + private BlobStore blobStore; + + /** + * Open connection. + * + * @throws Exception + */ + protected void openConnection() throws Exception { + if (blobStore == null) { + blobStore = + DataStoreBlobStoreBuilder + .newInstance() + .build( + BlobStoreConfiguration.newInstance().loadFromSystemProps()).get(); + } + } + + @Override + protected BlobStore getBlobStore(com.mongodb.DB db) { + return blobStore; + } + + @Override + public void setUpCluster(MicroKernel[] cluster) throws Exception { + MongoConnection connection = getMongoConnection(); + openConnection(); + DB db = connection.getDB(); + dropCollections(db); + + for (int i = 0; i < cluster.length; i++) { + cluster[i] = new DocumentMK.Builder(). 
+ setMongoDB(db).setBlobStore(blobStore).setClusterId(i).open(); + } + } + + @Override + protected void dropCollections(DB db) { + db.getCollection(MongoBlobStore.COLLECTION_BLOBS).drop(); + db.getCollection(Collection.NODES.toString()).drop(); + try { + ((DataStoreBlobStore) blobStore).clearInUse(); + ((DataStoreBlobStore) blobStore).getDataStore() + .deleteAllOlderThan(System.currentTimeMillis() + 1000000); + } catch (DataStoreException e) { + } + } +} Index: oak-parent/pom.xml =================================================================== --- oak-parent/pom.xml (revision 1566507) +++ oak-parent/pom.xml (working copy) @@ -333,6 +333,12 @@ logback-classic ${logback.version} + + org.apache.jclouds.provider + aws-s3 + 1.7.0 + + Index: oak-run/pom.xml =================================================================== --- oak-run/pom.xml (revision 1566507) +++ oak-run/pom.xml (working copy) @@ -154,6 +154,16 @@ ${jackrabbit.version} + org.apache.jackrabbit + jackrabbit-data + ${jackrabbit.version} + + + org.apache.jackrabbit + jackrabbit-aws-ext + ${jackrabbit.version} + + org.apache.commons commons-math 2.0 @@ -172,7 +182,10 @@ ch.qos.logback logback-classic - + + org.apache.jclouds.provider + aws-s3 + junit Index: oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java =================================================================== --- oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java (revision 1566507) +++ oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java (working copy) @@ -23,13 +23,18 @@ import org.apache.commons.io.FileUtils; import org.apache.jackrabbit.api.JackrabbitRepository; import org.apache.jackrabbit.mk.api.MicroKernel; +import org.apache.jackrabbit.mk.blobs.BlobStore; import org.apache.jackrabbit.mk.core.MicroKernelImpl; +import org.apache.jackrabbit.oak.Oak; +import org.apache.jackrabbit.oak.jcr.Jcr; +import 
org.apache.jackrabbit.oak.kernel.KernelNodeStore; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreConfiguration; +import org.apache.jackrabbit.oak.plugins.blob.BlobStoreHelper; +import org.apache.jackrabbit.oak.plugins.blob.cloud.CloudBlobStore; +import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; import org.apache.jackrabbit.oak.plugins.document.DocumentMK; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; -import org.apache.jackrabbit.oak.Oak; -import org.apache.jackrabbit.oak.jcr.Jcr; -import org.apache.jackrabbit.oak.kernel.KernelNodeStore; import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.plugins.segment.SegmentStore; import org.apache.jackrabbit.oak.plugins.segment.file.FileStore; @@ -84,9 +89,25 @@ public static RepositoryFixture getMongo( final String host, final int port, final String database, final boolean dropDBAfterTest, final long cacheSize) { + return new OakRepositoryFixture("Oak-Mongo") { private String dbName = database != null ? database : unique; private DocumentMK[] kernels; + private BlobStore blobStore; + + private BlobStore getBlobStore() { + BlobStoreConfiguration config = + BlobStoreConfiguration.newInstance().loadFromSystemProps(); + try { + blobStore = + BlobStoreHelper.create(config).orNull(); + } catch (Exception e) { + throw new RuntimeException(e); + } + + return blobStore; + } + @Override protected Repository[] internalSetUpCluster(int n) throws Exception { Repository[] cluster = new Repository[n]; @@ -94,14 +115,23 @@ for (int i = 0; i < cluster.length; i++) { MongoConnection mongo = new MongoConnection(host, port, dbName); + BlobStore blobStore = getBlobStore(); + if (blobStore == null) { kernels[i] = new DocumentMK.Builder(). setMongoDB(mongo.getDB()). setClusterId(i).setLogging(false).open(); + } else { + kernels[i] = new DocumentMK.Builder(). 
+ setMongoDB(mongo.getDB()). + setBlobStore(blobStore). + setClusterId(i).setLogging(false).open(); + } Oak oak = new Oak(new KernelNodeStore(kernels[i], cacheSize)); cluster[i] = new Jcr(oak).createRepository(); } return cluster; } + @Override public void tearDownCluster() { super.tearDownCluster(); @@ -114,6 +144,13 @@ new MongoConnection(host, port, dbName); mongo.getDB().dropDatabase(); mongo.close(); + if (blobStore instanceof CloudBlobStore) { + ((CloudBlobStore) blobStore).deleteBucket(); + } else if (blobStore instanceof DataStoreBlobStore) { + ((DataStoreBlobStore) blobStore).clearInUse(); + ((DataStoreBlobStore) blobStore).deleteAllOlderThan( + System.currentTimeMillis() + 10000000); + } } catch (Exception e) { throw new RuntimeException(e); } @@ -125,9 +162,25 @@ public static RepositoryFixture getMongoNS( final String host, final int port, final String database, final boolean dropDBAfterTest, final long cacheSize) { + return new OakRepositoryFixture("Oak-MongoNS") { private String dbName = database != null ? database : unique; private DocumentNodeStore[] stores; + private BlobStore blobStore; + + private BlobStore getBlobStore() { + BlobStoreConfiguration config = + BlobStoreConfiguration.newInstance().loadFromSystemProps(); + try { + blobStore = + BlobStoreHelper.create(config).orNull(); + } catch (Exception e) { + throw new RuntimeException(e); + } + + return blobStore; + } + @Override protected Repository[] internalSetUpCluster(int n) throws Exception { Repository[] cluster = new Repository[n]; @@ -135,15 +188,25 @@ for (int i = 0; i < cluster.length; i++) { MongoConnection mongo = new MongoConnection(host, port, dbName); + BlobStore blobStore = getBlobStore(); + if (blobStore == null) { stores[i] = new DocumentMK.Builder(). setMongoDB(mongo.getDB()). memoryCacheSize(cacheSize). setClusterId(i).setLogging(false).getNodeStore(); + } else { + stores[i] = new DocumentMK.Builder(). + setMongoDB(mongo.getDB()). + setBlobStore(blobStore). 
+ memoryCacheSize(cacheSize). + setClusterId(i).setLogging(false).getNodeStore(); + } Oak oak = new Oak(stores[i]); cluster[i] = new Jcr(oak).createRepository(); } return cluster; } + @Override public void tearDownCluster() { super.tearDownCluster(); @@ -156,6 +219,13 @@ new MongoConnection(host, port, dbName); mongo.getDB().dropDatabase(); mongo.close(); + if (blobStore instanceof CloudBlobStore) { + ((CloudBlobStore) blobStore).deleteBucket(); + } else if (blobStore instanceof DataStoreBlobStore) { + ((DataStoreBlobStore) blobStore).clearInUse(); + ((DataStoreBlobStore) blobStore).deleteAllOlderThan( + System.currentTimeMillis() + 10000000); + } } catch (Exception e) { throw new RuntimeException(e); }