.../java/org/apache/hadoop/hbase/nio/ByteBuff.java | 55 +++++ .../codec/prefixtree/PrefixTreeBlockMeta.java | 101 ++++----- .../hbase/codec/prefixtree/PrefixTreeCodec.java | 9 +- .../hbase/codec/prefixtree/PrefixTreeSeeker.java | 246 ++++++++++++++++++++- .../codec/prefixtree/decode/ArraySearcherPool.java | 4 +- .../codec/prefixtree/decode/DecoderFactory.java | 17 +- .../prefixtree/decode/PrefixTreeArrayScanner.java | 12 +- .../codec/prefixtree/decode/PrefixTreeCell.java | 107 +++++++-- .../prefixtree/decode/column/ColumnNodeReader.java | 11 +- .../prefixtree/decode/column/ColumnReader.java | 3 +- .../codec/prefixtree/decode/row/RowNodeReader.java | 28 ++- .../decode/timestamp/MvccVersionDecoder.java | 5 +- .../decode/timestamp/TimestampDecoder.java | 5 +- .../codec/prefixtree/encode/row/RowNodeWriter.java | 2 +- .../apache/hadoop/hbase/util/vint/UFIntTool.java | 11 + .../apache/hadoop/hbase/util/vint/UVIntTool.java | 15 ++ .../apache/hadoop/hbase/util/vint/UVLongTool.java | 15 ++ .../codec/prefixtree/blockmeta/TestBlockMeta.java | 3 +- .../codec/prefixtree/column/TestColumnBuilder.java | 4 +- .../prefixtree/row/TestPrefixTreeSearcher.java | 10 +- .../hbase/codec/prefixtree/row/TestRowEncoder.java | 12 +- .../prefixtree/timestamp/TestTimestampEncoder.java | 4 +- 22 files changed, 538 insertions(+), 141 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java index 9a6041f..e43901e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/ByteBuff.java @@ -454,4 +454,59 @@ public abstract class ByteBuff { } return (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i); } + + /** + * Search sorted array "a" for byte "key". I can't remember if I wrote this or + * copied it from somewhere. (mcorgan) + * + * @param a + * Array to search. Entries must be sorted and unique. + * @param fromIndex + * First index inclusive of "a" to include in the search. + * @param toIndex + * Last index exclusive of "a" to include in the search. + * @param key + * The byte to search for. + * @return The index of key if found. If not found, return -(index + 1), where + * negative indicates "not found" and the "index + 1" handles the "-0" + * case. + */ + public static int unsignedBinarySearch(ByteBuff a, int fromIndex, int toIndex, byte key) { + int unsignedKey = key & 0xff; + int low = fromIndex; + int high = toIndex - 1; + + while (low <= high) { + int mid = (low + high) >>> 1; + int midVal = a.get(mid) & 0xff; + + if (midVal < unsignedKey) { + low = mid + 1; + } else if (midVal > unsignedKey) { + high = mid - 1; + } else { + return mid; // key found + } + } + return -(low + 1); // key not found. + } + + public static String toStringBinary(final ByteBuff b, int off, int len) { + StringBuilder result = new StringBuilder(); + // Just in case we are passed a 'len' that is > buffer length... 
+ if (off >= b.capacity()) + return result.toString(); + if (off + len > b.capacity()) + len = b.capacity() - off; + for (int i = off; i < off + len; ++i) { + int ch = b.get(i) & 0xFF; + if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') + || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) { + result.append((char) ch); + } else { + result.append(String.format("\\x%02X", ch)); + } + } + return result.toString(); + } } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java index 8410cf3..b66402f 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java @@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.codec.prefixtree; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder; +import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.vint.UVIntTool; import org.apache.hadoop.hbase.util.vint.UVLongTool; @@ -56,8 +56,6 @@ public class PrefixTreeBlockMeta { /**************** transient fields *********************/ - - protected int arrayOffset; protected int bufferOffset; @@ -116,7 +114,6 @@ public class PrefixTreeBlockMeta { public PrefixTreeBlockMeta(InputStream is) throws IOException{ this.version = VERSION; - this.arrayOffset = 0; this.bufferOffset = 0; readVariableBytesFromInputStream(is); } @@ -124,14 +121,13 @@ public class PrefixTreeBlockMeta { /** * @param buffer positioned at start of PtBlockMeta */ - public PrefixTreeBlockMeta(ByteBuffer buffer) { + public PrefixTreeBlockMeta(ByteBuff buffer) { initOnBlock(buffer); } - public void initOnBlock(ByteBuffer buffer) { - arrayOffset = buffer.arrayOffset(); + public void initOnBlock(ByteBuff buffer) { bufferOffset = buffer.position(); - readVariableBytesFromArray(buffer.array(), arrayOffset + bufferOffset); + readVariableBytesFromArray(buffer, bufferOffset); } @@ -263,79 +259,79 @@ public class PrefixTreeBlockMeta { numUniqueTags = UVIntTool.getInt(is); } - public void readVariableBytesFromArray(byte[] bytes, int offset) { + public void readVariableBytesFromArray(ByteBuff buf, int offset) { int position = offset; - version = UVIntTool.getInt(bytes, position); + version = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(version); - numMetaBytes = UVIntTool.getInt(bytes, position); + numMetaBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numMetaBytes); - numKeyValueBytes = UVIntTool.getInt(bytes, position); + numKeyValueBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numKeyValueBytes); - setIncludesMvccVersion(bytes[position]); + setIncludesMvccVersion(buf.get(position)); ++position; - numRowBytes = UVIntTool.getInt(bytes, position); + numRowBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numRowBytes); - numFamilyBytes = UVIntTool.getInt(bytes, position); + numFamilyBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numFamilyBytes); - numQualifierBytes = UVIntTool.getInt(bytes, position); + numQualifierBytes = UVIntTool.getInt(buf, 
position); position += UVIntTool.numBytes(numQualifierBytes); - numTagsBytes = UVIntTool.getInt(bytes, position); + numTagsBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numTagsBytes); - numTimestampBytes = UVIntTool.getInt(bytes, position); + numTimestampBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numTimestampBytes); - numMvccVersionBytes = UVIntTool.getInt(bytes, position); + numMvccVersionBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numMvccVersionBytes); - numValueBytes = UVIntTool.getInt(bytes, position); + numValueBytes = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numValueBytes); - nextNodeOffsetWidth = UVIntTool.getInt(bytes, position); + nextNodeOffsetWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(nextNodeOffsetWidth); - familyOffsetWidth = UVIntTool.getInt(bytes, position); + familyOffsetWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(familyOffsetWidth); - qualifierOffsetWidth = UVIntTool.getInt(bytes, position); + qualifierOffsetWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(qualifierOffsetWidth); - tagsOffsetWidth = UVIntTool.getInt(bytes, position); + tagsOffsetWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(tagsOffsetWidth); - timestampIndexWidth = UVIntTool.getInt(bytes, position); + timestampIndexWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(timestampIndexWidth); - mvccVersionIndexWidth = UVIntTool.getInt(bytes, position); + mvccVersionIndexWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(mvccVersionIndexWidth); - valueOffsetWidth = UVIntTool.getInt(bytes, position); + valueOffsetWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(valueOffsetWidth); - valueLengthWidth = UVIntTool.getInt(bytes, position); + valueLengthWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(valueLengthWidth); - rowTreeDepth = UVIntTool.getInt(bytes, position); + rowTreeDepth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(rowTreeDepth); - maxRowLength = UVIntTool.getInt(bytes, position); + maxRowLength = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(maxRowLength); - maxQualifierLength = UVIntTool.getInt(bytes, position); + maxQualifierLength = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(maxQualifierLength); - maxTagsLength = UVIntTool.getInt(bytes, position); + maxTagsLength = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(maxTagsLength); - minTimestamp = UVLongTool.getLong(bytes, position); + minTimestamp = UVLongTool.getLong(buf, position); position += UVLongTool.numBytes(minTimestamp); - timestampDeltaWidth = UVIntTool.getInt(bytes, position); + timestampDeltaWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(timestampDeltaWidth); - minMvccVersion = UVLongTool.getLong(bytes, position); + minMvccVersion = UVLongTool.getLong(buf, position); position += UVLongTool.numBytes(minMvccVersion); - mvccVersionDeltaWidth = UVIntTool.getInt(bytes, position); + mvccVersionDeltaWidth = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(mvccVersionDeltaWidth); - setAllSameType(bytes[position]); + setAllSameType(buf.get(position)); ++position; - allTypes = bytes[position]; + allTypes = buf.get(position); ++position; - numUniqueRows = UVIntTool.getInt(bytes, position); + numUniqueRows = UVIntTool.getInt(buf, position); 
position += UVIntTool.numBytes(numUniqueRows); - numUniqueFamilies = UVIntTool.getInt(bytes, position); + numUniqueFamilies = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numUniqueFamilies); - numUniqueQualifiers = UVIntTool.getInt(bytes, position); + numUniqueQualifiers = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numUniqueQualifiers); - numUniqueTags = UVIntTool.getInt(bytes, position); + numUniqueTags = UVIntTool.getInt(buf, position); position += UVIntTool.numBytes(numUniqueTags); } @@ -405,8 +401,6 @@ public class PrefixTreeBlockMeta { return false; if (allTypes != other.allTypes) return false; - if (arrayOffset != other.arrayOffset) - return false; if (bufferOffset != other.bufferOffset) return false; if (valueLengthWidth != other.valueLengthWidth) @@ -483,7 +477,6 @@ public class PrefixTreeBlockMeta { int result = 1; result = prime * result + (allSameType ? 1231 : 1237); result = prime * result + allTypes; - result = prime * result + arrayOffset; result = prime * result + bufferOffset; result = prime * result + valueLengthWidth; result = prime * result + valueOffsetWidth; @@ -525,9 +518,7 @@ public class PrefixTreeBlockMeta { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("PtBlockMeta [arrayOffset="); - builder.append(arrayOffset); - builder.append(", bufferOffset="); + builder.append("PtBlockMeta [bufferOffset="); builder.append(bufferOffset); builder.append(", version="); builder.append(version); @@ -602,12 +593,8 @@ public class PrefixTreeBlockMeta { /************** absolute getters *******************/ - public int getAbsoluteMetaOffset() { - return arrayOffset + bufferOffset; - } - public int getAbsoluteRowOffset() { - return getAbsoluteMetaOffset() + numMetaBytes; + return getBufferOffset() + numMetaBytes; } public int getAbsoluteFamilyOffset() { @@ -749,14 +736,6 @@ public class PrefixTreeBlockMeta { this.numMetaBytes = numMetaBytes; } - public int getArrayOffset() { - return arrayOffset; - } - - public void setArrayOffset(int arrayOffset) { - this.arrayOffset = arrayOffset; - } - public int getBufferOffset() { return bufferOffset; } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index 29f4811..1efee96 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.nio.ByteBuff; +import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.io.WritableUtils; @@ -83,7 +84,7 @@ public class PrefixTreeCodec implements DataBlockEncoder { int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException { ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);// waste sourceAsBuffer.mark(); - PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer); + PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(new SingleByteBuff(sourceAsBuffer)); sourceAsBuffer.rewind(); int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes(); byte[] 
keyValueBytesWithHeader = new byte[numV1BytesWithHeader]; @@ -92,7 +93,7 @@ public class PrefixTreeCodec implements DataBlockEncoder { CellSearcher searcher = null; try { boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc(); - searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc); + searcher = DecoderFactory.checkOut(new SingleByteBuff(sourceAsBuffer), includesMvcc); while (searcher.advance()) { KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current()); // needs to be modified for DirectByteBuffers. no existing methods to @@ -121,9 +122,7 @@ public class PrefixTreeCodec implements DataBlockEncoder { PrefixTreeArraySearcher searcher = null; try { // should i includeMemstoreTS (second argument)? i think PrefixKeyDeltaEncoder is, so i will - // TODO : Change to work with BBs - searcher = DecoderFactory.checkOut(block.asSubByteBuffer(block.limit() - block.position()), - true); + searcher = DecoderFactory.checkOut(block, true); if (!searcher.positionAtFirstCell()) { return null; } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java index d77bb24..685f8bf 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree; import java.nio.ByteBuffer; +import org.apache.hadoop.hbase.ByteBufferedCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; @@ -33,6 +34,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker; import org.apache.hadoop.hbase.nio.ByteBuff; +import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -57,9 +59,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { @Override public void setCurrentBuffer(ByteBuff fullBlockBuffer) { - block = fullBlockBuffer.asSubByteBuffer(fullBlockBuffer.limit()); - // TODO : change to Bytebuff - ptSearcher = DecoderFactory.checkOut(block, includeMvccVersion); + ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion); rewind(); } @@ -99,16 +99,26 @@ public class PrefixTreeSeeker implements EncodedSeeker { */ @Override public Cell getCell() { - Cell cell = ptSearcher.current(); + ByteBufferedCell cell = (ByteBufferedCell)ptSearcher.current(); if (cell == null) { return null; } - return new ClonedPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), - cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), - cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(), - cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(), - cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(), - cell.getSequenceId()); + // Use the ByteBuffered cell to see if the Cell is onheap or offheap + if (cell.getValueByteBuffer().hasArray()) { + return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), + cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), + 
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(), + cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(), + cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(), + cell.getSequenceId()); + } else { + return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), + cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), + cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(), + cell.getValueByteBuffer(), cell.getValuePositionInByteBuffer(), cell.getValueLength(), + cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), + cell.getTypeByte(), cell.getSequenceId()); + } } /** @@ -208,12 +218,13 @@ public class PrefixTreeSeeker implements EncodedSeeker { return comparator.compare(key, ptSearcher.current()); } + /** * Cloned version of the PrefixTreeCell where except the value part, the rest * of the key part is deep copied * */ - private static class ClonedPrefixTreeCell implements Cell, SettableSequenceId, HeapSize { + private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize { private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT) + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY)); @@ -232,7 +243,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { private long seqId; private byte type; - public ClonedPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam, + public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam, int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val, int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type, long seqId) { @@ -367,4 +378,215 @@ public class PrefixTreeSeeker implements EncodedSeeker { return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength; } } + + private static class OffheapPrefixTreeCell extends ByteBufferedCell implements Cell, + SettableSequenceId, HeapSize { + private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT) + + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER)); + private ByteBuffer rowBuff; + private short rowLength; + private ByteBuffer famBuff; + private byte famLength; + private ByteBuffer qualBuff; + private int qualLength; + private ByteBuffer val; + private int valOffset; + private int valLength; + private ByteBuffer tagBuff; + private int tagsLength; + private long ts; + private long seqId; + private byte type; + public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam, + int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val, + int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type, + long seqId) { + byte[] tmpRow = new byte[rowLength]; + System.arraycopy(row, rowOffset, tmpRow, 0, rowLength); + this.rowBuff = ByteBuffer.wrap(tmpRow); + this.rowLength = rowLength; + byte[] tmpFam = new byte[famLength]; + System.arraycopy(fam, famOffset, tmpFam, 0, famLength); + this.famBuff = ByteBuffer.wrap(tmpFam); + this.famLength = famLength; + byte[] tmpQual = new byte[qualLength]; + System.arraycopy(qual, qualOffset, tmpQual, 0, 
qualLength); + this.qualBuff = ByteBuffer.wrap(tmpQual); + this.qualLength = qualLength; + byte[] tmpTag = new byte[tagLength]; + System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength); + this.tagBuff = ByteBuffer.wrap(tmpTag); + this.tagsLength = tagLength; + this.val = val; + this.valLength = valLength; + this.valOffset = valOffset; + this.ts = ts; + this.seqId = seqId; + this.type = type; + } + + @Override + public void setSequenceId(long seqId) { + this.seqId = seqId; + } + + @Override + public byte[] getRowArray() { + return this.rowBuff.array(); + } + + @Override + public int getRowOffset() { + return getRowPositionInByteBuffer(); + } + + @Override + public short getRowLength() { + return this.rowLength; + } + + @Override + public byte[] getFamilyArray() { + return this.famBuff.array(); + } + + @Override + public int getFamilyOffset() { + return getFamilyPositionInByteBuffer(); + } + + @Override + public byte getFamilyLength() { + return this.famLength; + } + + @Override + public byte[] getQualifierArray() { + return this.qualBuff.array(); + } + + @Override + public int getQualifierOffset() { + return getQualifierPositionInByteBuffer(); + } + + @Override + public int getQualifierLength() { + return this.qualLength; + } + + @Override + public long getTimestamp() { + return ts; + } + + @Override + public byte getTypeByte() { + return type; + } + + @Override + public long getSequenceId() { + return seqId; + } + + @Override + public byte[] getValueArray() { + byte[] tmpVal = new byte[valLength]; + ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength); + return tmpVal; + } + + @Override + public int getValueOffset() { + return 0; + } + + @Override + public int getValueLength() { + return this.valLength; + } + + @Override + public byte[] getTagsArray() { + return this.tagBuff.array(); + } + + @Override + public int getTagsOffset() { + return getTagsPositionInByteBuffer(); + } + + @Override + public int getTagsLength() { + return this.tagsLength; + } + + @Override + public ByteBuffer getRowByteBuffer() { + return this.rowBuff; + } + + @Override + public int getRowPositionInByteBuffer() { + return 0; + } + + @Override + public ByteBuffer getFamilyByteBuffer() { + return this.famBuff; + } + + @Override + public int getFamilyPositionInByteBuffer() { + return 0; + } + + @Override + public ByteBuffer getQualifierByteBuffer() { + return this.qualBuff; + } + + @Override + public int getQualifierPositionInByteBuffer() { + return 0; + } + + @Override + public ByteBuffer getTagsByteBuffer() { + return this.tagBuff; + } + + @Override + public int getTagsPositionInByteBuffer() { + return 0; + } + + @Override + public ByteBuffer getValueByteBuffer() { + return this.val; + } + + @Override + public int getValuePositionInByteBuffer() { + return this.valOffset; + } + + @Override + public long heapSize() { + return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength; + } + + @Override + public String toString() { + String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()); + String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength()); + String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(), + getQualifierLength()); + String timestamp = String.valueOf((getTimestamp())); + return row + "/" + family + (family != null && family.length() > 0 ? 
":" : "") + qualifier + + "/" + timestamp + "/" + Type.codeToType(type); + } + } } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java index f0b249f..e6df88a 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode; -import java.nio.ByteBuffer; import java.util.Queue; import java.util.concurrent.LinkedBlockingQueue; import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.nio.ByteBuff; /** *
@@ -45,7 +45,7 @@ public class ArraySearcherPool {
protected Queue
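
For reference, a minimal usage sketch of the two static helpers this patch adds to ByteBuff. It assumes only what the patch itself shows: the SingleByteBuff(ByteBuffer) constructor already used in PrefixTreeCodec, and the new ByteBuff.unsignedBinarySearch / ByteBuff.toStringBinary signatures; the class name and sample bytes below are illustrative, not part of the change.

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.SingleByteBuff;

public class ByteBuffHelperExample {
  public static void main(String[] args) {
    // unsignedBinarySearch expects entries sorted by unsigned value and unique.
    byte[] sorted = new byte[] { 0x01, 0x05, 0x10, (byte) 0x80, (byte) 0xFF };
    ByteBuff buf = new SingleByteBuff(ByteBuffer.wrap(sorted));

    // Found: returns the index of the matching byte (2 here).
    int hit = ByteBuff.unsignedBinarySearch(buf, 0, sorted.length, (byte) 0x10);
    // Not found: returns -(insertionPoint + 1); 0x20 would slot in at index 3, so -4.
    int miss = ByteBuff.unsignedBinarySearch(buf, 0, sorted.length, (byte) 0x20);
    System.out.println("hit=" + hit + " miss=" + miss);

    // Printable ASCII passes through; other bytes are escaped as \xNN, e.g. \x01\x05\x10\x80\xFF.
    System.out.println(ByteBuff.toStringBinary(buf, 0, sorted.length));
  }
}

The same idea, branching on whether the backing buffer is on-heap via hasArray(), is what the new getCell() in PrefixTreeSeeker uses to choose between the OnheapPrefixTreeCell and OffheapPrefixTreeCell copies.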