diff --git oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileAccess.java oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileAccess.java
index 5c9f9de..b74d548 100644
--- oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileAccess.java
+++ oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileAccess.java
@@ -99,7 +99,7 @@ abstract class FileAccess {
         }
 
         @Override
-        public int length() throws IOException {
+        public synchronized int length() throws IOException {
            long length = file.length();
            checkState(length < Integer.MAX_VALUE);
            return (int) length;
diff --git oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java
index 5ef5cad..5cf4f6f 100644
--- oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java
+++ oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactionAndCleanupIT.java
@@ -63,6 +63,7 @@ import org.apache.jackrabbit.oak.api.CommitFailedException;
 import org.apache.jackrabbit.oak.api.PropertyState;
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.commons.concurrent.ExecutorCloser;
+import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
 import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
 import org.apache.jackrabbit.oak.segment.file.FileStore;
 import org.apache.jackrabbit.oak.segment.file.FileStoreGCMonitor;
@@ -1040,6 +1041,69 @@ public class CompactionAndCleanupIT {
         }
     }
 
+    @Test
+    public void randomAccessFileConcurrentReadAndLength() throws Exception {
+        final FileStore fileStore = fileStoreBuilder(getFileStoreFolder())
+                .withGCOptions(defaultGCOptions().setRetainedGenerations(2))
+                .withMaxFileSize(1)
+                .build();
+
+        final SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build();
+        ExecutorService executorService = newFixedThreadPool(300);
+        final AtomicInteger counter = new AtomicInteger();
+        final ReferenceCollector dummyCollector = new ReferenceCollector() {
+
+            @Override
+            public void addReference(String reference, String nodeId) {
+                // do nothing
+            }
+        };
+
+        try {
+            Callable<Void> concurrentWriteTask = new Callable<Void>() {
+                @Override
+                public Void call() throws Exception {
+                    NodeBuilder builder = nodeStore.getRoot().builder();
+                    builder.setProperty("blob-" + counter.getAndIncrement(), createBlob(nodeStore, 25 * 25));
+                    nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+                    fileStore.flush();
+                    return null;
+                }
+            };
+
+            Callable<Void> concurrentCleanupTask = new Callable<Void>() {
+                @Override
+                public Void call() throws Exception {
+                    fileStore.cleanup();
+                    return null;
+                }
+            };
+
+            Callable<Void> concurrentReferenceCollector = new Callable<Void>() {
+                @Override
+                public Void call() throws Exception {
+                    fileStore.collectBlobReferences(dummyCollector);
+                    return null;
+                }
+            };
+
+            List<Future<Void>> results = newArrayList();
+            for (int i = 0; i < 100; i++) {
+                results.add(executorService.submit(concurrentWriteTask));
+                results.add(executorService.submit(concurrentCleanupTask));
+                results.add(executorService.submit(concurrentReferenceCollector));
+            }
+
+            for (Future<Void> result : results) {
+                assertNull(result.get());
+            }
+
+        } finally {
+            new ExecutorCloser(executorService).close();
+            fileStore.close();
+        }
+    }
+
     private static void addContent(NodeBuilder builder) {
         for (int k = 0; k < 10000; k++) {
             builder.setProperty(UUID.randomUUID().toString(), UUID.randomUUID().toString());
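
Note on the FileAccess hunk: on some JDK implementations RandomAccessFile.length() is realised by seeking the shared descriptor to its end and back, so an unsynchronized length() can interleave with the seek()/read() pair used for positioned reads on the same file and corrupt the read position. The sketch below only illustrates that locking pattern under this assumption; it is not Oak code, and the names SharedFileAccess and readBytes are made up.

    import java.io.IOException;
    import java.io.RandomAccessFile;

    // Illustration only, not Oak code: the positioned read and length()
    // synchronize on the same monitor, because RandomAccessFile.length()
    // may move the underlying file pointer while computing the size.
    class SharedFileAccess {

        private final RandomAccessFile file;

        SharedFileAccess(RandomAccessFile file) {
            this.file = file;
        }

        // seek() + read() must not interleave with any other operation
        // that repositions the shared descriptor.
        synchronized int readBytes(long position, byte[] buffer) throws IOException {
            file.seek(position);
            return file.read(buffer);
        }

        // Guarded by the same monitor so a concurrent length() cannot
        // disturb an in-flight seek()/read() pair.
        synchronized long length() throws IOException {
            return file.length();
        }
    }

The new IT exercises exactly this kind of interleaving by running writes, cleanup() and collectBlobReferences() concurrently against one FileStore, which is the access pattern the added synchronized keyword is meant to make safe.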