diff --git oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
index 0cc806c192..92d6de9e29 100644
--- oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
+++ oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriter.java
@@ -93,6 +93,8 @@ public class DefaultSegmentWriter implements SegmentWriter {
     private static final int CHILD_NODE_UPDATE_LIMIT = Integer
             .getInteger("child.node.update.limit", 10000);
 
+    protected static final String MAX_MAP_RECORD_SIZE_KEY = "oak.segmentNodeStore.maxMapRecordSize";
+
     @NotNull
     private final WriterCacheManager cacheManager;
 
@@ -237,6 +239,11 @@ public class DefaultSegmentWriter implements SegmentWriter {
 
         private RecordId writeMap(@Nullable MapRecord base, @NotNull Map changes) throws IOException {
             if (base != null) {
+                int maxMapRecordSize = Integer.getInteger(MAX_MAP_RECORD_SIZE_KEY, 0);
+                if (base.size() > maxMapRecordSize) {
+                    System.setProperty(MAX_MAP_RECORD_SIZE_KEY, String.valueOf(base.size()));
+                }
+
                 if (base.size() >= MapRecord.WARN_SIZE) {
                     if (base.size() >= MapRecord.ERROR_SIZE_HARD_STOP) {
                         throw new UnsupportedOperationException("Map record has more than " + MapRecord.ERROR_SIZE_HARD_STOP
diff --git oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
index b968fb1f78..d707b52feb 100644
--- oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
+++ oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/DefaultSegmentWriterTest.java
@@ -309,6 +309,25 @@ public class DefaultSegmentWriterTest {
         unsubscribe(logAppender);
     }
 
+    @Test
+    public void testMapRecordMaxSizeSystemProperty() throws IOException {
+        System.setProperty(DefaultSegmentWriter.MAX_MAP_RECORD_SIZE_KEY, String.valueOf(0));
+
+        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
+        Map map = newHashMap();
+        for (int i = 0; i < 1000; i++) {
+            map.put("key" + i, blockId);
+        }
+
+        // first write a map record with 1000 entries
+        MapRecord many = new MapRecord(store.fileStore().getReader(), writer.writeMap(null, map));
+
+        // update it by adding one more entry
+        MapRecord one = new MapRecord(store.fileStore().getReader(), writer.writeMap(many, ImmutableMap.of("one", blockId)));
+
+        assertEquals(1000, (int) Integer.getInteger(DefaultSegmentWriter.MAX_MAP_RECORD_SIZE_KEY, 0));
+    }
+
     @Test
     public void testMapRemoveNonExisting() throws IOException {
         RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);