Index: src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java	(working copy)
@@ -23,6 +23,8 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB.Allocation;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
@@ -113,15 +115,18 @@
     protected ByteBuffer currentBuffer;
     protected STATE current = createSeekerState(); // always valid
     protected STATE previous = createSeekerState(); // may not be valid
+    protected MemStoreLAB allocator;
+
     @SuppressWarnings("unchecked")
-    public BufferedEncodedSeeker(RawComparator comparator) {
+    public BufferedEncodedSeeker(RawComparator comparator, int uncompressedSize) {
       this.comparator = comparator;
       if (comparator instanceof SamePrefixComparator) {
         this.samePrefixComparator = (SamePrefixComparator) comparator;
       } else {
         this.samePrefixComparator = null;
       }
+      allocator = new MemStoreLAB(uncompressedSize, uncompressedSize);
     }

     @Override
@@ -133,8 +138,15 @@
     @Override
     public ByteBuffer getKeyDeepCopy() {
-      ByteBuffer keyBuffer = ByteBuffer.allocate(current.keyLength);
-      keyBuffer.put(current.keyBuffer, 0, current.keyLength);
+      Allocation a = allocator.allocateBytes(current.keyLength);
+      ByteBuffer keyBuffer;
+      if (a == null) {
+        keyBuffer = ByteBuffer.allocate(current.keyLength);
+        keyBuffer.put(current.keyBuffer, 0, current.keyLength);
+      } else {
+        keyBuffer = ByteBuffer.wrap(a.getData(), a.getOffset(), current.keyLength);
+        System.arraycopy(current.keyBuffer, 0, a.getData(), a.getOffset(), current.keyLength);
+      }
       return keyBuffer;
     }

@@ -147,14 +159,26 @@
     @Override
     public ByteBuffer getKeyValueBuffer() {
-      ByteBuffer kvBuffer = ByteBuffer.allocate(
-          2 * Bytes.SIZEOF_INT + current.keyLength + current.valueLength);
+      int len = 2 * Bytes.SIZEOF_INT + current.keyLength + current.valueLength;
+      Allocation a = allocator.allocateBytes(len);
+      ByteBuffer kvBuffer;
+      if (a == null) {
+        kvBuffer = ByteBuffer.allocate(len);
+      } else {
+        kvBuffer = ByteBuffer.wrap(a.getData(), a.getOffset(), len);
+      }
       kvBuffer.putInt(current.keyLength);
       kvBuffer.putInt(current.valueLength);
-      kvBuffer.put(current.keyBuffer, 0, current.keyLength);
-      kvBuffer.put(currentBuffer.array(),
-          currentBuffer.arrayOffset() + current.valueOffset,
-          current.valueLength);
+      if (a == null) {
+        kvBuffer.put(current.keyBuffer, 0, current.keyLength);
+        kvBuffer.put(currentBuffer.array(),
+            currentBuffer.arrayOffset() + current.valueOffset,
+            current.valueLength);
+      } else {
+        System.arraycopy(current.keyBuffer, 0, a.getData(), a.getOffset() + 2 * Bytes.SIZEOF_INT, current.keyLength);
+        System.arraycopy(currentBuffer.array(),
+            currentBuffer.arrayOffset() + current.valueOffset, a.getData(), a.getOffset() + 2 * Bytes.SIZEOF_INT + current.keyLength, current.valueLength);
+      }
       return kvBuffer;
     }
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java	(working copy)
@@ -66,8 +66,8 @@
   @Override
   public EncodedSeeker createSeeker(RawComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker(comparator) {
+      final boolean includesMemstoreTS, int uncompressedSize) {
+    return new BufferedEncodedSeeker(comparator, uncompressedSize) {
       @Override
       protected void decodeNext() {
         current.keyLength = currentBuffer.getInt();
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java	(working copy)
@@ -89,7 +89,7 @@
    * @return A newly created seeker.
    */
   public EncodedSeeker createSeeker(RawComparator comparator,
-      boolean includesMemstoreTS);
+      boolean includesMemstoreTS, int uncompressedSize);

   /**
    * An interface which enable to seek while underlying data is encoded.
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java	(working copy)
@@ -421,8 +421,8 @@
   @Override
   public EncodedSeeker createSeeker(RawComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker(comparator) {
+      final boolean includesMemstoreTS, int uncompressedSize) {
+    return new BufferedEncodedSeeker(comparator, uncompressedSize) {
       private byte[] familyNameWithSize;
       private static final int TIMESTAMP_WITH_TYPE_LENGTH =
           Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java	(working copy)
@@ -417,8 +417,8 @@
   @Override
   public EncodedSeeker createSeeker(RawComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker(comparator) {
+      final boolean includesMemstoreTS, int uncompressedSize) {
+    return new BufferedEncodedSeeker(comparator, uncompressedSize) {
       private void decode(boolean isFirst) {
         byte flag = currentBuffer.get();
         if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java	(working copy)
@@ -164,8 +164,8 @@
   @Override
   public EncodedSeeker createSeeker(RawComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker(comparator) {
+      final boolean includesMemstoreTS, int uncompressedSize) {
+    return new BufferedEncodedSeeker(comparator, uncompressedSize) {
       @Override
       protected void decodeNext() {
         current.keyLength = ByteBufferUtils.readCompressedInt(currentBuffer);
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java	(working copy)
@@ -932,10 +932,10 @@
       this.includesMemstoreTS = includesMemstoreTS;
     }

-    private void setDataBlockEncoder(DataBlockEncoder dataBlockEncoder) {
+    private void setDataBlockEncoder(DataBlockEncoder dataBlockEncoder, int uncompressedSize) {
       this.dataBlockEncoder = dataBlockEncoder;
       seeker = dataBlockEncoder.createSeeker(reader.getComparator(),
-          includesMemstoreTS);
+          includesMemstoreTS, uncompressedSize);
     }

     /**
@@ -959,7 +959,7 @@
           dataBlockEncoderId)) {
         DataBlockEncoder encoder =
             DataBlockEncoding.getDataBlockEncoderById(dataBlockEncoderId);
-        setDataBlockEncoder(encoder);
+        setDataBlockEncoder(encoder, newBlock.getUncompressedSizeWithoutHeader());
       }

       seeker.setCurrentBuffer(getEncodedBuffer(newBlock));
Index: src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java	(revision 1441859)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java	(working copy)
@@ -62,9 +62,14 @@
   }

   public MemStoreLAB(Configuration conf) {
-    chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
-    maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
+    this(conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT),
+        conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT));
+  }

+  public MemStoreLAB(int chunkSize, int maxAlloc) {
+    this.chunkSize = chunkSize;
+    this.maxAlloc = maxAlloc;
+
     // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
     Preconditions.checkArgument(
         maxAlloc <= chunkSize,
@@ -262,11 +267,11 @@
         ", off=" + offset + ")";
     }

-    byte[] getData() {
+    public byte[] getData() {
       return data;
     }

-    int getOffset() {
+    public int getOffset() {
       return offset;
     }
   }
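Note (not part of the patch): both getKeyDeepCopy() and getKeyValueBuffer() above follow the same allocate-or-fall-back pattern: ask the MemStoreLAB for a slice of its current chunk and drop back to a plain ByteBuffer.allocate() when the allocator returns null (as it does, for example, when the request exceeds its maxAlloc). Below is a minimal, standalone sketch of that pattern only; SimpleAllocation, deepCopy and CopyExample are hypothetical stand-ins for MemStoreLAB.Allocation and the seeker copy methods, not HBase APIs.

import java.nio.ByteBuffer;

// Hypothetical stand-in for MemStoreLAB.Allocation: a byte[] chunk plus the
// offset granted to this particular allocation.
final class SimpleAllocation {
  private final byte[] data;
  private final int offset;

  SimpleAllocation(byte[] data, int offset) {
    this.data = data;
    this.offset = offset;
  }

  public byte[] getData() { return data; }
  public int getOffset() { return offset; }
}

final class CopyExample {
  // Copies 'length' bytes from 'src' into a buffer backed either by a chunk
  // allocation (when one was granted) or by a fresh heap array (when the
  // allocator declined, i.e. 'a' is null) -- the same fallback the patched
  // getKeyDeepCopy() uses.
  static ByteBuffer deepCopy(byte[] src, int length, SimpleAllocation a) {
    ByteBuffer out;
    if (a == null) {
      // Allocator declined: plain heap allocation, relative put.
      out = ByteBuffer.allocate(length);
      out.put(src, 0, length);
    } else {
      // Wrap the shared chunk at the granted offset and copy into it directly.
      out = ByteBuffer.wrap(a.getData(), a.getOffset(), length);
      System.arraycopy(src, 0, a.getData(), a.getOffset(), length);
    }
    return out;
  }

  public static void main(String[] args) {
    byte[] key = {1, 2, 3, 4};
    byte[] chunk = new byte[64]; // pretend chunk handed out by an allocator
    ByteBuffer fromChunk = deepCopy(key, key.length, new SimpleAllocation(chunk, 16));
    ByteBuffer fromHeap = deepCopy(key, key.length, null);
    System.out.println(fromChunk.position()); // 16: chunk-backed buffer starts at the granted offset
    System.out.println(fromHeap.position());  // 4: heap-backed buffer ends after the relative put
  }
}

As the main() output shows, the two branches leave the returned ByteBuffer at different positions (the granted offset versus the end of the copied data), which is worth keeping in mind when reading the callers of the real methods.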