diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordWriters.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordWriters.java new file mode 100644 index 0000000..a459a95 --- /dev/null +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordWriters.java @@ -0,0 +1,374 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.jackrabbit.oak.plugins.segment; + +import static java.util.Collections.singleton; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BLOCK; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BRANCH; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BUCKET; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.LEAF; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.LIST; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.NODE; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.TEMPLATE; +import static org.apache.jackrabbit.oak.plugins.segment.RecordType.VALUE; +import static org.apache.jackrabbit.oak.plugins.segment.SegmentVersion.V_11; + +import static org.apache.jackrabbit.oak.plugins.segment.Segment.SMALL_LIMIT; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public class RecordWriters { + + private RecordWriters() { + + } + + public abstract static class RecordWriter { + private final RecordType type; + protected final int size; + protected final Collection ids; + + protected RecordWriter(RecordType type, int size, + Collection ids) { + this.type = type; + this.size = size; + this.ids = ids; + } + + protected RecordWriter(RecordType type, int size, RecordId id) { + this(type, size, singleton(id)); + } + + protected RecordWriter(RecordType type, int size) { + this(type, size, Collections. emptyList()); + } + + public final T write(SegmentBuilder builder) { + RecordId id = builder.prepare(type, size, ids); + return writeRecordContent(id, builder); + } + + protected abstract T writeRecordContent(RecordId id, + SegmentBuilder builder); + } + + public static class MapLeafWriter extends RecordWriter { + + private final int level; + private final Collection entries; + + protected MapLeafWriter() { + super(LEAF, 4); + this.level = 0; + this.entries = Collections. 
emptyList(); + } + + protected MapLeafWriter(int level, Collection entries, + List ids) { + super(LEAF, 4 + entries.size() * 4, ids); + this.level = level; + this.entries = entries; + } + + @Override + protected MapRecord writeRecordContent(RecordId id, + SegmentBuilder builder) { + if (level == 0 && entries.isEmpty()) { + builder.writeInt(0); + } else { + builder.writeInt((level << MapRecord.SIZE_BITS) + | entries.size()); + + // copy the entries to an array so we can sort them before + // writing + final MapEntry[] array = entries.toArray(new MapEntry[entries + .size()]); + Arrays.sort(array); + for (MapEntry entry : array) { + builder.writeInt(entry.getHash()); + } + for (MapEntry entry : array) { + builder.writeRecordId(entry.getKey()); + builder.writeRecordId(entry.getValue()); + } + } + + return new MapRecord(id); + } + } + + public static class MapBranchWriter extends RecordWriter { + + private final int level; + private final int bitmap; + + protected MapBranchWriter(int level, int bitmap, List ids) { + super(BRANCH, 8, ids); + this.level = level; + this.bitmap = bitmap; + } + + @Override + protected MapRecord writeRecordContent(RecordId id, + SegmentBuilder builder) { + builder.writeInt(level); + builder.writeInt(bitmap); + for (RecordId bucketId : ids) { + builder.writeRecordId(bucketId); + } + return new MapRecord(id); + } + } + + public static class ListWriter extends RecordWriter { + + private final int count; + private final RecordId lid; + + protected ListWriter() { + super(LIST, 4); + this.count = 0; + this.lid = null; + } + + protected ListWriter(int count, RecordId lid) { + super(LIST, 4, lid); + this.count = count; + this.lid = lid; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + builder.writeInt(count); + if (lid != null) { + builder.writeRecordId(lid); + } + return id; + } + } + + public static class ListBucketWriter extends RecordWriter { + + protected ListBucketWriter(List ids) { + super(BUCKET, 0, ids); + } + + @Override + protected RecordId
writeRecordContent(RecordId id, + SegmentBuilder builder) { + for (RecordId bucketId : ids) { + builder.writeRecordId(bucketId); + } + return id; + } + } + + public static class BlockWriter extends RecordWriter { + + private final byte[] bytes; + private final int offset; + + protected BlockWriter(byte[] bytes, int offset, int length) { + super(BLOCK, length); + this.bytes = bytes; + this.offset = offset; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + builder.writeBytes(bytes, offset, size); + return id; + } + } + + public static class SingleValueWriter extends RecordWriter { + + private final RecordId rid; + private final long len; + + protected SingleValueWriter(RecordId rid, long len) { + super(VALUE, 8, rid); + this.rid = rid; + this.len = len; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + builder.writeLong(len); + builder.writeRecordId(rid); + return id; + } + } + + public static class ByteValueWriter extends RecordWriter { + + private final int length; + private final byte[] data; + + protected ByteValueWriter(int length, byte[] data) { + super(VALUE, length + getSizeDelta(length)); + this.length = length; + this.data = data; + } + + private static boolean isSmallSize(int length) { + return length < SMALL_LIMIT; + } + + private static int getSizeDelta(int length) { + if (isSmallSize(length)) { + return 1; + } else { + return 2; + } + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + if (isSmallSize(length)) { + builder.writeByte((byte) length); + } else { + builder.writeShort((short) ((length - SMALL_LIMIT) | 0x8000)); + } + builder.writeBytes(data, 0, length); + return id; + } + } + + public static class LargeBlobWriter extends RecordWriter { + + private final RecordId stringRecord; + + protected LargeBlobWriter(RecordId stringRecord) { + super(VALUE, 1, stringRecord); + this.stringRecord = 
stringRecord; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + // The length uses a fake "length" field that is always equal to + // 0xF0. + // This allows the code to tell small blob IDs apart from large + // ones. + builder.writeByte((byte) 0xF0); + builder.writeRecordId(stringRecord); + builder.addBlobRef(id); + return id; + } + } + + public static class SmallBlobIdWriter extends RecordWriter { + + private final byte[] blobId; + + protected SmallBlobIdWriter(byte[] blobId) { + super(VALUE, 2 + blobId.length); + this.blobId = blobId; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + int length = blobId.length; + builder.writeShort((short) (length | 0xE000)); + builder.writeBytes(blobId, 0, length); + builder.addBlobRef(id); + return id; + } + } + + public static class TemplateWriter extends RecordWriter { + + private final RecordId[] propertyNames; + private final byte[] propertyTypes; + private final int finalHead; + private final RecordId finalPrimaryId; + private final List finalMixinIds; + private final RecordId finalChildNameId; + private final RecordId finalPropNamesId; + private final SegmentVersion version; + + protected TemplateWriter(Collection ids, + final RecordId[] propertyNames, final byte[] propertyTypes, + final int finalHead, final RecordId finalPrimaryId, + final List finalMixinIds, + final RecordId finalChildNameId, + final RecordId finalPropNamesId, SegmentVersion version) { + super(TEMPLATE, 4 + propertyTypes.length, ids); + this.propertyNames = propertyNames; + this.propertyTypes = propertyTypes; + this.finalHead = finalHead; + this.finalPrimaryId = finalPrimaryId; + this.finalMixinIds = finalMixinIds; + this.finalChildNameId = finalChildNameId; + this.finalPropNamesId = finalPropNamesId; + this.version = version; + } + + @Override + protected RecordId writeRecordContent(RecordId id, + SegmentBuilder builder) { + builder.writeInt(finalHead); + if
(finalPrimaryId != null) { + builder.writeRecordId(finalPrimaryId); + } + if (finalMixinIds != null) { + for (RecordId mixinId : finalMixinIds) { + builder.writeRecordId(mixinId); + } + } + if (finalChildNameId != null) { + builder.writeRecordId(finalChildNameId); + } + if (version.onOrAfter(V_11)) { + if (finalPropNamesId != null) { + builder.writeRecordId(finalPropNamesId); + } + } + for (int i = 0; i < propertyNames.length; i++) { + if (!version.onOrAfter(V_11)) { + // V10 only + builder.writeRecordId(propertyNames[i]); + } + builder.writeByte(propertyTypes[i]); + } + return id; + } + } + + public static class NodeStateWriter extends RecordWriter { + + protected NodeStateWriter(List ids) { + super(NODE, 0, ids); + } + + @Override + protected SegmentNodeState writeRecordContent(RecordId id, + SegmentBuilder builder) { + for (RecordId recordId : ids) { + builder.writeRecordId(recordId); + } + return new SegmentNodeState(id); + } + } + +} diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentBuilder.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentBuilder.java index 09808f9..e0e59d9 100644 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentBuilder.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentBuilder.java @@ -19,6 +19,7 @@ package org.apache.jackrabbit.oak.plugins.segment; +import static com.google.common.base.Charsets.UTF_8; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; @@ -28,7 +29,6 @@ import static com.google.common.collect.Sets.newHashSet; import static java.lang.System.arraycopy; import static java.lang.System.currentTimeMillis; import static java.lang.System.identityHashCode; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.VALUE; import static 
org.apache.jackrabbit.oak.plugins.segment.Segment.MAX_SEGMENT_SIZE; import static org.apache.jackrabbit.oak.plugins.segment.Segment.RECORD_ID_BYTES; import static org.apache.jackrabbit.oak.plugins.segment.Segment.SEGMENT_REFERENCE_LIMIT; @@ -36,14 +36,13 @@ import static org.apache.jackrabbit.oak.plugins.segment.Segment.align; import java.nio.ByteBuffer; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import com.google.common.base.Charsets; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.ByteValueWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -136,16 +135,8 @@ public class SegmentBuilder { ",\"gc\":" + tracker.getCompactionMap().getGeneration() + ",\"t\":" + currentTimeMillis() + "}"; - byte[] data = metaInfo.getBytes(Charsets.UTF_8); - if (data.length < Segment.SMALL_LIMIT) { - prepare(VALUE, data.length + 1, Collections.emptyList()); - writeByte((byte) data.length); - writeBytes(data, 0, data.length); - } else { - prepare(VALUE, data.length + 2, Collections.emptyList()); - writeShort((short) ((data.length - Segment.SMALL_LIMIT) | 0x8000)); - writeBytes(data, 0, data.length); - } + byte[] data = metaInfo.getBytes(UTF_8); + new ByteValueWriter(data.length, data).write(this); } static byte[] createNewBuffer(SegmentVersion v) { diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java index c2ef360..bcf59b3 100644 --- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java +++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/SegmentWriter.java @@ -16,6 +16,7 @@ */ package org.apache.jackrabbit.oak.plugins.segment; +import static com.google.common.base.Charsets.UTF_8; import static 
com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkElementIndex; import static com.google.common.base.Preconditions.checkNotNull; @@ -24,26 +25,23 @@ import static com.google.common.base.Preconditions.checkPositionIndexes; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.addAll; import static com.google.common.collect.Lists.newArrayList; +import static com.google.common.collect.Lists.newArrayListWithCapacity; import static com.google.common.collect.Lists.newArrayListWithExpectedSize; +import static com.google.common.collect.Lists.partition; import static com.google.common.collect.Maps.newHashMap; import static com.google.common.collect.Sets.newHashSet; +import static com.google.common.io.ByteStreams.read; +import static com.google.common.io.Closeables.close; import static java.lang.Thread.currentThread; import static java.util.Arrays.asList; import static java.util.Collections.emptyMap; import static java.util.Collections.nCopies; -import static java.util.Collections.singleton; import static org.apache.jackrabbit.oak.api.Type.BINARIES; +import static org.apache.jackrabbit.oak.api.Type.BINARY; import static org.apache.jackrabbit.oak.api.Type.NAME; import static org.apache.jackrabbit.oak.api.Type.NAMES; +import static org.apache.jackrabbit.oak.api.Type.STRING; import static org.apache.jackrabbit.oak.plugins.segment.MapRecord.BUCKETS_PER_LEVEL; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BLOCK; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BRANCH; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.BUCKET; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.LEAF; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.LIST; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.NODE; -import static 
org.apache.jackrabbit.oak.plugins.segment.RecordType.TEMPLATE; -import static org.apache.jackrabbit.oak.plugins.segment.RecordType.VALUE; import static org.apache.jackrabbit.oak.plugins.segment.Segment.MAX_SEGMENT_SIZE; import static org.apache.jackrabbit.oak.plugins.segment.Segment.align; import static org.apache.jackrabbit.oak.plugins.segment.Segment.readString; @@ -53,9 +51,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.SequenceInputStream; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -63,15 +59,22 @@ import java.util.Set; import javax.jcr.PropertyType; -import com.google.common.base.Charsets; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.io.ByteStreams; -import com.google.common.io.Closeables; import org.apache.jackrabbit.oak.api.Blob; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.BlockWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.ByteValueWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.LargeBlobWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.ListBucketWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.ListWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.MapBranchWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.MapLeafWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.NodeStateWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.RecordWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.SingleValueWriter; +import 
org.apache.jackrabbit.oak.plugins.segment.RecordWriters.SmallBlobIdWriter; +import org.apache.jackrabbit.oak.plugins.segment.RecordWriters.TemplateWriter; import org.apache.jackrabbit.oak.spi.blob.BlobStore; import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; import org.apache.jackrabbit.oak.spi.state.DefaultNodeStateDiff; @@ -170,19 +173,8 @@ public class SegmentWriter { if (value.equals(entry.getValue())) { return base; } else { - final MapRecord baseMap = base; - RecordId mapId = writeRecord(new RecordWriter(BRANCH, 8, asList( - entry.getKey(), value, baseMap.getRecordId())) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt(-1); - builder.writeInt(entry.getHash()); - builder.writeRecordId(entry.getKey()); - builder.writeRecordId(value); - builder.writeRecordId(baseMap.getRecordId()); - } - }); - return new MapRecord(mapId); + return writeRecord(new MapBranchWriter(-1, entry.getHash(), + asList(entry.getKey(), value, base.getRecordId()))); } } } @@ -207,7 +199,6 @@ public class SegmentWriter { entries.add(new MapEntry(key, keyId, entry.getValue())); } } - return writeMapBucket(base, entries, 0); } @@ -219,54 +210,25 @@ public class SegmentWriter { checkPositionIndex(level, MapRecord.MAX_NUMBER_OF_LEVELS); checkArgument(size != 0 || level == MapRecord.MAX_NUMBER_OF_LEVELS); - List ids = Lists.newArrayListWithCapacity(2 * size); + List ids = newArrayListWithCapacity(2 * size); for (MapEntry entry : entries) { ids.add(entry.getKey()); ids.add(entry.getValue()); } - - // copy the entries to an array so we can sort them before writing - final MapEntry[] array = entries.toArray(new MapEntry[entries.size()]); - Arrays.sort(array); - - RecordId mapId = writeRecord(new RecordWriter(LEAF, 4 + size * 4, ids) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt((level << MapRecord.SIZE_BITS) | size); - for (MapEntry entry : array) { - builder.writeInt(entry.getHash()); - } - for 
(MapEntry entry : array) { - builder.writeRecordId(entry.getKey()); - builder.writeRecordId(entry.getValue()); - } - } - }); - return new MapRecord(mapId); + return writeRecord(new MapLeafWriter(level, entries, ids)); } private MapRecord writeMapBranch(final int level, final int size, MapRecord[] buckets) { int bitmap = 0; - final List bucketIds = Lists.newArrayListWithCapacity(buckets.length); + final List bucketIds = newArrayListWithCapacity(buckets.length); for (int i = 0; i < buckets.length; i++) { if (buckets[i] != null) { bitmap |= 1L << i; bucketIds.add(buckets[i].getRecordId()); } } - - final int bits = bitmap; - RecordId mapId = writeRecord(new RecordWriter(BRANCH, 8, bucketIds) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt((level << MapRecord.SIZE_BITS) | size); - builder.writeInt(bits); - for (RecordId buckedId : bucketIds) { - builder.writeRecordId(buckedId); - } - } - }); - return new MapRecord(mapId); + int levelIn = (level << MapRecord.SIZE_BITS) | size; + return writeRecord(new MapBranchWriter(levelIn, bitmap, bucketIds)); } private MapRecord writeMapBucket(MapRecord base, Collection entries, int level) { @@ -275,13 +237,7 @@ public class SegmentWriter { if (base != null) { return base; } else if (level == 0) { - RecordId mapId = writeRecord(new RecordWriter(LEAF, 4) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt(0); - } - }); - return new MapRecord(mapId); + return writeRecord(new MapLeafWriter()); } else { return null; } @@ -372,7 +328,7 @@ public class SegmentWriter { while (thisLevel.size() > 1) { List nextLevel = newArrayList(); for (List bucket : - Lists.partition(thisLevel, ListRecord.LEVEL_SIZE)) { + partition(thisLevel, ListRecord.LEVEL_SIZE)) { if (bucket.size() > 1) { nextLevel.add(writeListBucket(bucket)); } else { @@ -386,14 +342,7 @@ public class SegmentWriter { private RecordId writeListBucket(final List bucket) { 
checkArgument(bucket.size() > 1); - return writeRecord(new RecordWriter(BUCKET, 0, bucket) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - for (RecordId bucketId : bucket) { - builder.writeRecordId(bucketId); - } - } - }); + return writeRecord(new ListBucketWriter(bucket)); } private static List> splitToBuckets(Collection entries, int level) { @@ -416,35 +365,13 @@ public class SegmentWriter { } private RecordId writeValueRecord(final long length, final RecordId blocks) { - return writeRecord(new RecordWriter(VALUE, 8, blocks) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeLong((length - Segment.MEDIUM_LIMIT) | (0x3L << 62)); - builder.writeRecordId(blocks); - } - }); + long len = (length - Segment.MEDIUM_LIMIT) | (0x3L << 62); + return writeRecord(new SingleValueWriter(blocks, len)); } private RecordId writeValueRecord(final int length, final byte[] data) { checkArgument(length < Segment.MEDIUM_LIMIT); - RecordId id; - if (length < Segment.SMALL_LIMIT) { - return writeRecord(new RecordWriter(VALUE, 1 + length) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeByte((byte) length); - builder.writeBytes(data, 0, length); - } - }); - } else { - return writeRecord(new RecordWriter(VALUE, 2 + length) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeShort((short) ((length - Segment.SMALL_LIMIT) | 0x8000)); - builder.writeBytes(data, 0, length); - } - }); - } + return writeRecord(new ByteValueWriter(length, data)); } /** @@ -459,7 +386,7 @@ public class SegmentWriter { return id; // shortcut if the same string was recently stored } - byte[] data = string.getBytes(Charsets.UTF_8); + byte[] data = string.getBytes(UTF_8); if (data.length < Segment.MEDIUM_LIMIT) { // only cache short strings to avoid excessive memory use @@ -521,7 +448,7 @@ public class SegmentWriter { * @see Segment#BLOB_ID_SMALL_LIMIT */ private 
RecordId writeBlobId(String blobId) { - byte[] data = blobId.getBytes(Charsets.UTF_8); + byte[] data = blobId.getBytes(UTF_8); if (data.length < Segment.BLOB_ID_SMALL_LIMIT) { return writeSmallBlobId(data); } else { @@ -538,17 +465,7 @@ public class SegmentWriter { * @return A record ID pointing to the written blob ID. */ private RecordId writeLargeBlobId(String blobId) { - final RecordId stringRecord = writeString(blobId); - return writeRecord(new RecordWriter(VALUE, 1, stringRecord) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - // The length uses a fake "length" field that is always equal to 0xF0. - // This allows the code to take apart small from a large blob IDs. - builder.writeByte((byte) 0xF0); - builder.writeRecordId(stringRecord); - builder.addBlobRef(id); - } - }); + return writeRecord(new LargeBlobWriter(writeString(blobId))); } /** @@ -561,14 +478,7 @@ public class SegmentWriter { private RecordId writeSmallBlobId(final byte[] blobId) { final int length = blobId.length; checkArgument(length < Segment.BLOB_ID_SMALL_LIMIT); - return writeRecord(new RecordWriter(VALUE, 2 + length) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeShort((short) (length | 0xE000)); - builder.writeBytes(blobId, 0, length); - builder.addBlobRef(id); - } - }); + return writeRecord(new SmallBlobIdWriter(blobId)); } /** @@ -582,12 +492,7 @@ public class SegmentWriter { RecordId writeBlock(final byte[] bytes, final int offset, final int length) { checkNotNull(bytes); checkPositionIndexes(offset, offset + length, bytes.length); - return writeRecord(new RecordWriter(BLOCK, length) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeBytes(bytes, offset, length); - } - }); + return writeRecord(new BlockWriter(bytes, offset, length)); } SegmentBlob writeExternalBlob(String blobId) { @@ -618,7 +523,7 @@ public class SegmentWriter { threw = false; return new SegmentBlob(id); } 
finally { - Closeables.close(stream, threw); + close(stream, threw); } } @@ -626,7 +531,7 @@ public class SegmentWriter { throws IOException { BlobStore blobStore = store.getBlobStore(); byte[] data = new byte[MAX_SEGMENT_SIZE]; - int n = ByteStreams.read(stream, data, 0, data.length); + int n = read(stream, data, 0, data.length); // Special case for short binaries (up to about 16kB): // store them directly as small- or medium-sized value records @@ -653,7 +558,7 @@ public class SegmentWriter { blockIds.add(new RecordId(bulkId, data.length - len + i)); } - n = ByteStreams.read(stream, data, 0, data.length); + n = read(stream, data, 0, data.length); length += n; } @@ -674,13 +579,13 @@ public class SegmentWriter { if (type.tag() == PropertyType.BINARY) { try { SegmentBlob blob = - writeBlob(state.getValue(Type.BINARY, i)); + writeBlob(state.getValue(BINARY, i)); valueIds.add(blob.getRecordId()); } catch (IOException e) { throw new IllegalStateException("Unexpected IOException", e); } } else { - String value = state.getValue(Type.STRING, i); + String value = state.getValue(STRING, i); RecordId valueId = previousValues.get(value); if (valueId == null) { valueId = writeString(value); @@ -692,21 +597,9 @@ public class SegmentWriter { if (!type.isArray()) { return valueIds.iterator().next(); } else if (count == 0) { - return writeRecord(new RecordWriter(LIST, 4) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt(0); - } - }); + return writeRecord(new ListWriter()); } else { - final RecordId listId = writeList(valueIds); - return writeRecord(new RecordWriter(LIST, 4, listId) { - @Override - public void write(RecordId id, SegmentBuilder builder) { - builder.writeInt(count); - builder.writeRecordId(listId); - } - }); + return writeRecord(new ListWriter(count, writeList(valueIds))); } } @@ -771,54 +664,21 @@ public class SegmentWriter { RecordId propNamesId = null; if (version.onOrAfter(V_11)) { if (propertyNames.length > 0) { - 
propNamesId = writeList(Arrays.asList(propertyNames)); + propNamesId = writeList(asList(propertyNames)); ids.add(propNamesId); } } else { - ids.addAll(Arrays.asList(propertyNames)); + ids.addAll(asList(propertyNames)); } checkState(propertyNames.length < (1 << 18)); head |= propertyNames.length; - return writeTemplate(template, ids, propertyNames, propertyTypes, head, primaryId, - mixinIds, childNameId, propNamesId); - } - public RecordId writeTemplate(Template template, final Collection ids, - final RecordId[] propertyNames, final byte[] propertyTypes, final int finalHead, - final RecordId finalPrimaryId, final List finalMixinIds, final RecordId - finalChildNameId, final RecordId finalPropNamesId) { - RecordId id = writeRecord(new RecordWriter(TEMPLATE, 4 + propertyTypes.length, ids) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - builder.writeInt(finalHead); - if (finalPrimaryId != null) { - builder.writeRecordId(finalPrimaryId); - } - if (finalMixinIds != null) { - for (RecordId mixinId : finalMixinIds) { - builder.writeRecordId(mixinId); - } - } - if (finalChildNameId != null) { - builder.writeRecordId(finalChildNameId); - } - if (version.onOrAfter(V_11)) { - if (finalPropNamesId != null) { - builder.writeRecordId(finalPropNamesId); - } - } - for (int i = 0; i < propertyNames.length; i++) { - if (!version.onOrAfter(V_11)) { - // V10 only - builder.writeRecordId(propertyNames[i]); - } - builder.writeByte(propertyTypes[i]); - } - } - }); - records.put(template, id); - return id; + RecordId tid = writeRecord(new TemplateWriter(ids, propertyNames, + propertyTypes, head, primaryId, mixinIds, childNameId, + propNamesId, version)); + records.put(template, tid); + return tid; } public SegmentNodeState writeNode(NodeState state) { @@ -860,7 +720,7 @@ public class SegmentWriter { String childName = template.getChildName(); if (childName == Template.MANY_CHILD_NODES) { MapRecord base; - final Map childNodes = Maps.newHashMap(); + final Map 
childNodes = newHashMap(); if (before != null && before.getChildNodeCount(2) > 1 && after.getChildNodeCount(2) > 1) { @@ -934,16 +794,7 @@ public class SegmentWriter { ids.addAll(pIds); } } - - RecordId recordId = writeRecord(new RecordWriter(NODE, 0, ids) { - @Override - protected void write(RecordId id, SegmentBuilder builder) { - for (RecordId recordId : ids) { - builder.writeRecordId(recordId); - } - } - }); - return new SegmentNodeState(recordId); + return writeRecord(new NodeStateWriter(ids)); } /** @@ -963,39 +814,15 @@ public class SegmentWriter { } } - private RecordId writeRecord(RecordWriter recordWriter) { + private T writeRecord(RecordWriter recordWriter) { SegmentBuilder builder = segmentBuilderPool.borrowBuilder(); try { - RecordId id = builder.prepare(recordWriter.type, recordWriter.size, recordWriter.ids); - recordWriter.write(id, builder); - return id; + return recordWriter.write(builder); } finally { segmentBuilderPool.returnBuilder(builder); } } - private abstract static class RecordWriter { - private final RecordType type; - private final int size; - private final Collection ids; - - protected RecordWriter(RecordType type, int size, Collection ids) { - this.type = type; - this.size = size; - this.ids = ids; - } - - protected RecordWriter(RecordType type, int size, RecordId id) { - this(type, size, singleton(id)); - } - - protected RecordWriter(RecordType type, int size) { - this(type, size, Collections.emptyList()); - } - - protected abstract void write(RecordId id, SegmentBuilder builder); - } - private class SegmentBuilderPool { private final Set borrowed = newHashSet(); private final Map builders = newHashMap();