.../hadoop/hbase/ByteBufferedKeyOnlyKeyValue.java  | 225 +++++++++++++++++
.../hadoop/hbase/OffheapKeyOnlyKeyValue.java       | 210 ----------------
.../org/apache/hadoop/hbase/OffheapKeyValue.java   | 278 +++++++++++++++++++++
.../io/encoding/BufferedDataBlockEncoder.java      |  44 +---
.../apache/hadoop/hbase/util/ByteBufferUtils.java  |  50 ++++
.../apache/hadoop/hbase/TestOffheapKeyValue.java   | 205 +++++++++++++++
.../hadoop/hbase/io/hfile/HFileBlockIndex.java     |   8 +-
.../hadoop/hbase/io/hfile/HFileReaderImpl.java     | 148 ++++++-----
.../hbase/io/hfile/bucket/ByteBufferIOEngine.java  |  10 +-
.../io/hfile/bucket/TestByteBufferIOEngine.java    |   4 -
10 files changed, 859 insertions(+), 323 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.java
new file mode 100644
index 0000000..bb48a5f
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.java
@@ -0,0 +1,225 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * This is a key-only Cell implementation that is identical to {@link KeyValue.KeyOnlyKeyValue}
+ * with respect to key serialization but has its data in off-heap memory.
+ */
+@InterfaceAudience.Private
+public class ByteBufferedKeyOnlyKeyValue extends ByteBufferedCell {
+
+  private ByteBuffer buf;
+  private int offset = 0; // offset into the buffer where the key starts
+  private int length = 0; // length of the serialized key
+  private short rowLen;
+
+  public ByteBufferedKeyOnlyKeyValue() {
+  }
+
+  public ByteBufferedKeyOnlyKeyValue(ByteBuffer buf, int offset, int length) {
+    this.buf = buf;
+    this.offset = offset;
+    this.length = length;
+    this.rowLen = ByteBufferUtils.toShort(this.buf, this.offset);
+  }
+
+  /**
+   * A setter that allows this instance to be reused across keys, avoiding the
+   * creation of a new ByteBufferedKeyOnlyKeyValue every time one is needed.
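+   * The buffer is read only with absolute offsets (its own position and limit
+   * are never modified), and unlike the old OffheapKeyOnlyKeyValue no
+   * direct-buffer assertion is made, so an on-heap or off-heap ByteBuffer
+   * may be passed.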
+ * @param key + * @param offset + * @param length + */ + public void setKey(ByteBuffer key, int offset, int length) { + this.buf = key; + this.offset = offset; + this.length = length; + this.rowLen = ByteBufferUtils.toShort(this.buf, this.offset); + } + + @Override + public byte[] getRowArray() { + return CellUtil.cloneRow(this); + } + + @Override + public int getRowOffset() { + return 0; + } + + @Override + public short getRowLength() { + return this.rowLen; + } + + @Override + public byte[] getFamilyArray() { + return CellUtil.cloneFamily(this); + } + + @Override + public int getFamilyOffset() { + return 0; + } + + @Override + public byte getFamilyLength() { + return getFamilyLength(getFamilyLengthPosition()); + } + + private byte getFamilyLength(int famLenPos) { + return ByteBufferUtils.toByte(this.buf, famLenPos); + } + + @Override + public byte[] getQualifierArray() { + return CellUtil.cloneQualifier(this); + } + + @Override + public int getQualifierOffset() { + return 0; + } + + @Override + public int getQualifierLength() { + return getQualifierLength(getRowLength(), getFamilyLength()); + } + + private int getQualifierLength(int rlength, int flength) { + return this.length - (int) KeyValue.getKeyDataStructureSize(rlength, flength, 0); + } + + @Override + public long getTimestamp() { + return ByteBufferUtils.toLong(this.buf, getTimestampOffset()); + } + + private int getTimestampOffset() { + return this.offset + this.length - KeyValue.TIMESTAMP_TYPE_SIZE; + } + + @Override + public byte getTypeByte() { + return ByteBufferUtils.toByte(this.buf, this.offset + this.length - 1); + } + + @Override + public long getSequenceId() { + return 0; + } + + @Override + public byte[] getValueArray() { + throw new IllegalArgumentException("This is a key only Cell"); + } + + @Override + public int getValueOffset() { + return 0; + } + + @Override + public int getValueLength() { + return 0; + } + + @Override + public byte[] getTagsArray() { + throw new IllegalArgumentException("This is a key only Cell"); + } + + @Override + public int getTagsOffset() { + return 0; + } + + @Override + public int getTagsLength() { + return 0; + } + + @Override + public ByteBuffer getRowByteBuffer() { + return this.buf; + } + + @Override + public int getRowPositionInByteBuffer() { + return this.offset + Bytes.SIZEOF_SHORT; + } + + @Override + public ByteBuffer getFamilyByteBuffer() { + return this.buf; + } + + @Override + public int getFamilyPositionInByteBuffer() { + return getFamilyLengthPosition() + Bytes.SIZEOF_BYTE; + } + + // The position in BB where the family length is added. 
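+  // Serialized key layout, as written by KeyValue:
+  // <2 byte row length> <row> <1 byte family length> <family> <qualifier>
+  // <8 byte timestamp> <1 byte type>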
+ private int getFamilyLengthPosition() { + return this.offset + Bytes.SIZEOF_SHORT + getRowLength(); + } + + @Override + public ByteBuffer getQualifierByteBuffer() { + return this.buf; + } + + @Override + public int getQualifierPositionInByteBuffer() { + int famLenPos = getFamilyLengthPosition(); + return famLenPos + Bytes.SIZEOF_BYTE + getFamilyLength(famLenPos); + } + + @Override + public ByteBuffer getValueByteBuffer() { + throw new IllegalArgumentException("This is a key only Cell"); + } + + @Override + public int getValuePositionInByteBuffer() { + return 0; + } + + @Override + public ByteBuffer getTagsByteBuffer() { + throw new IllegalArgumentException("This is a key only Cell"); + } + + @Override + public int getTagsPositionInByteBuffer() { + return 0; + } + + @Override + public String toString() { + return CellUtil.toString(this, false); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyOnlyKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyOnlyKeyValue.java deleted file mode 100644 index 477d16e..0000000 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyOnlyKeyValue.java +++ /dev/null @@ -1,210 +0,0 @@ -/** - * Copyright The Apache Software Foundation - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import java.nio.ByteBuffer; - -import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.util.ByteBufferUtils; -import org.apache.hadoop.hbase.util.Bytes; - -/** - * This is a key only Cell implementation which is identical to {@link KeyValue.KeyOnlyKeyValue} - * with respect to key serialization but have its data in off heap memory. - */ -@InterfaceAudience.Private -public class OffheapKeyOnlyKeyValue extends ByteBufferedCell { - - private ByteBuffer buf; - private int offset = 0; // offset into buffer where key starts at - private int length = 0; // length of this. 
- private short rowLen; - - public OffheapKeyOnlyKeyValue(ByteBuffer buf, int offset, int length) { - assert buf.isDirect(); - this.buf = buf; - this.offset = offset; - this.length = length; - this.rowLen = ByteBufferUtils.toShort(this.buf, this.offset); - } - - @Override - public byte[] getRowArray() { - return CellUtil.cloneRow(this); - } - - @Override - public int getRowOffset() { - return 0; - } - - @Override - public short getRowLength() { - return this.rowLen; - } - - @Override - public byte[] getFamilyArray() { - return CellUtil.cloneFamily(this); - } - - @Override - public int getFamilyOffset() { - return 0; - } - - @Override - public byte getFamilyLength() { - return getFamilyLength(getFamilyLengthPosition()); - } - - private byte getFamilyLength(int famLenPos) { - return ByteBufferUtils.toByte(this.buf, famLenPos); - } - - @Override - public byte[] getQualifierArray() { - return CellUtil.cloneQualifier(this); - } - - @Override - public int getQualifierOffset() { - return 0; - } - - @Override - public int getQualifierLength() { - return getQualifierLength(getRowLength(), getFamilyLength()); - } - - private int getQualifierLength(int rlength, int flength) { - return this.length - (int) KeyValue.getKeyDataStructureSize(rlength, flength, 0); - } - - @Override - public long getTimestamp() { - return ByteBufferUtils.toLong(this.buf, getTimestampOffset()); - } - - private int getTimestampOffset() { - return this.offset + this.length - KeyValue.TIMESTAMP_TYPE_SIZE; - } - - @Override - public byte getTypeByte() { - return ByteBufferUtils.toByte(this.buf, this.offset + this.length - 1); - } - - @Override - public long getSequenceId() { - return 0; - } - - @Override - public byte[] getValueArray() { - throw new IllegalArgumentException("This is a key only Cell"); - } - - @Override - public int getValueOffset() { - return 0; - } - - @Override - public int getValueLength() { - return 0; - } - - @Override - public byte[] getTagsArray() { - throw new IllegalArgumentException("This is a key only Cell"); - } - - @Override - public int getTagsOffset() { - return 0; - } - - @Override - public int getTagsLength() { - return 0; - } - - @Override - public ByteBuffer getRowByteBuffer() { - return this.buf; - } - - @Override - public int getRowPositionInByteBuffer() { - return this.offset + Bytes.SIZEOF_SHORT; - } - - @Override - public ByteBuffer getFamilyByteBuffer() { - return this.buf; - } - - @Override - public int getFamilyPositionInByteBuffer() { - return getFamilyLengthPosition() + Bytes.SIZEOF_BYTE; - } - - // The position in BB where the family length is added. 
- private int getFamilyLengthPosition() { - return this.offset + Bytes.SIZEOF_SHORT + getRowLength(); - } - - @Override - public ByteBuffer getQualifierByteBuffer() { - return this.buf; - } - - @Override - public int getQualifierPositionInByteBuffer() { - int famLenPos = getFamilyLengthPosition(); - return famLenPos + Bytes.SIZEOF_BYTE + getFamilyLength(famLenPos); - } - - @Override - public ByteBuffer getValueByteBuffer() { - throw new IllegalArgumentException("This is a key only Cell"); - } - - @Override - public int getValuePositionInByteBuffer() { - return 0; - } - - @Override - public ByteBuffer getTagsByteBuffer() { - throw new IllegalArgumentException("This is a key only Cell"); - } - - @Override - public int getTagsPositionInByteBuffer() { - return 0; - } - - @Override - public String toString() { - return CellUtil.toString(this, false); - } -} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyValue.java new file mode 100644 index 0000000..b8c796b --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapKeyValue.java @@ -0,0 +1,278 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ClassSize; + +/** + * This implementation of @link {@link ByteBufferedCell} is similar to KeyValue + * implementation but the underlying data is stored in the form of + * {@link ByteBuffer} + */ +@InterfaceAudience.Private +public class OffheapKeyValue extends ByteBufferedCell implements HeapSize, Cloneable, + SettableSequenceId, Streamable { + + protected final ByteBuffer buf; + protected final int offset; + protected final int length; + private final short rowLen; + private final int keyLen; + private long seqId = 0; + private final boolean hasTags; + // TODO : See if famLen can be cached or not? 
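+  // Serialized layout backing this cell, as written by KeyValue:
+  // <4 byte key length> <4 byte value length> <key> <value> [<2 byte tags length> <tags>]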
+ + private static final int HEAP_SIZE_OVERHEAD = Bytes.SIZEOF_SHORT + Bytes.SIZEOF_INT + + Bytes.SIZEOF_BOOLEAN; + + public OffheapKeyValue(ByteBuffer buf, int offset, int length, boolean hasTags) { + this.buf = buf; + this.offset = offset; + this.length = length; + rowLen = ByteBufferUtils.toShort(this.buf, this.offset + KeyValue.ROW_OFFSET); + keyLen = ByteBufferUtils.toInt(this.buf, this.offset); + this.hasTags = hasTags; + } + + @Override + public byte[] getRowArray() { + return CellUtil.cloneRow(this); + } + + @Override + public int getRowOffset() { + return 0; + } + + @Override + public short getRowLength() { + return this.rowLen; + } + + @Override + public byte[] getFamilyArray() { + return CellUtil.cloneFamily(this); + } + + @Override + public int getFamilyOffset() { + return 0; + } + + @Override + public byte getFamilyLength() { + return getFamilyLength(getFamilyLengthPosition()); + } + + private int getFamilyLengthPosition() { + return this.offset + KeyValue.ROW_KEY_OFFSET + rowLen + Bytes.SIZEOF_BYTE; + } + + private byte getFamilyLength(int famLenPos) { + return ByteBufferUtils.toByte(this.buf, famLenPos - 1); + } + + @Override + public byte[] getQualifierArray() { + return CellUtil.cloneQualifier(this); + } + + @Override + public int getQualifierOffset() { + return 0; + } + + @Override + public int getQualifierLength() { + return getQualifierLength(getRowLength(), getFamilyLength()); + } + + private int getQualifierLength(int rlength, int flength) { + return this.keyLen - (int) KeyValue.getKeyDataStructureSize(rlength, flength, 0); + } + + @Override + public long getTimestamp() { + int offset = getTimestampOffset(this.keyLen); + return ByteBufferUtils.toLong(this.buf, offset); + } + + private int getTimestampOffset(int keyLen) { + return this.offset + KeyValue.ROW_OFFSET + keyLen - KeyValue.TIMESTAMP_TYPE_SIZE; + } + + @Override + public byte getTypeByte() { + return ByteBufferUtils.toByte(this.buf, this.offset + this.keyLen - 1 + KeyValue.ROW_OFFSET); + } + + @Override + public long getSequenceId() { + return this.seqId; + } + + public void setSequenceId(long seqId) { + this.seqId = seqId; + } + + @Override + public byte[] getValueArray() { + return CellUtil.cloneValue(this); + } + + @Override + public int getValueOffset() { + return 0; + } + + @Override + public int getValueLength() { + return ByteBufferUtils.toInt(this.buf, this.offset + Bytes.SIZEOF_INT); + } + + @Override + public byte[] getTagsArray() { + return CellUtil.cloneTags(this); + } + + @Override + public int getTagsOffset() { + return 0; + } + + @Override + public int getTagsLength() { + if(!hasTags) { + return 0; + } + int tagsLen = this.length + - (this.keyLen + getValueLength() + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE); + if (tagsLen > 0) { + // There are some Tag bytes in the byte[]. 
So reduce 2 bytes which is + // added to denote the tags + // length + tagsLen -= KeyValue.TAGS_LENGTH_SIZE; + } + return tagsLen; + } + + @Override + public ByteBuffer getRowByteBuffer() { + return this.buf; + } + + @Override + public int getRowPositionInByteBuffer() { + return this.offset + KeyValue.ROW_KEY_OFFSET; + } + + @Override + public ByteBuffer getFamilyByteBuffer() { + return this.buf; + } + + @Override + public int getFamilyPositionInByteBuffer() { + return getFamilyLengthPosition(); + } + + @Override + public ByteBuffer getQualifierByteBuffer() { + return this.buf; + } + + @Override + public int getQualifierPositionInByteBuffer() { + return getFamilyLengthPosition() + getFamilyLength(); + } + + @Override + public ByteBuffer getValueByteBuffer() { + return this.buf; + } + + @Override + public int getValuePositionInByteBuffer() { + return this.offset + KeyValue.ROW_OFFSET + this.keyLen; + } + + @Override + public ByteBuffer getTagsByteBuffer() { + return this.buf; + } + + @Override + public int getTagsPositionInByteBuffer() { + int tagsLen = getTagsLength(); + if (tagsLen == 0) { + return this.offset + this.length; + } + return this.offset + this.length - tagsLen; + } + + @Override + public long heapSize() { + int sum = 0; + sum += ClassSize.OBJECT;// the OffheapKeyValue object itself + sum += ClassSize.REFERENCE;// pointer to "buffer" + sum += ClassSize.align(ClassSize.BYTE_BUFFER);// "buffer" + sum += ClassSize.align(length);// number of bytes of data in the "buffer" + sum += 2 * Bytes.SIZEOF_INT + HEAP_SIZE_OVERHEAD;// offset, length, rowlen, + // keylen, famlen + sum += Bytes.SIZEOF_LONG;// seqId + return ClassSize.align(sum); + } + + @Override + public int write(OutputStream out) throws IOException { + return write(out, true); + } + + @Override + public int write(OutputStream out, boolean withTags) throws IOException { + // In KeyValueUtil#oswrite we do a Cell serialization as KeyValue. Any + // changes doing here, pls + // check KeyValueUtil#oswrite also and do necessary changes. 
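+    // When tags exist but withTags is false, only <keyLen><valueLen><key><value>
+    // is written; the 2-byte tags length and the tag bytes at the tail are skipped.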
+ if (!hasTags) { + ByteBufferUtils.writeInt(out, this.length); + ByteBufferUtils.writeByteBuffer(out, this.buf, this.offset, this.length); + return this.length + Bytes.SIZEOF_INT; + } else { + int length = this.length; + if (!withTags) { + length = keyLen + this.getValueLength() + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE; + } + ByteBufferUtils.writeInt(out, length); + ByteBufferUtils.writeByteBuffer(out, this.buf, this.offset, length); + return length + Bytes.SIZEOF_INT; + } + } + + @Override + public String toString() { + return CellUtil.toString(this, true); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index a758b26..7827f47 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -28,19 +28,17 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.OffheapKeyOnlyKeyValue; +import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.SettableSequenceId; import org.apache.hadoop.hbase.Streamable; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.io.ByteBufferOutputStream; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.util.LRUDictionary; -import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -445,9 +443,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { public int write(OutputStream out, boolean withTags) throws IOException { int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength, tagsLength, withTags); - writeInt(out, lenToWrite); - writeInt(out, keyOnlyBuffer.length); - writeInt(out, valueLength); + ByteBufferUtils.writeInt(out, lenToWrite); + ByteBufferUtils.writeInt(out, keyOnlyBuffer.length); + ByteBufferUtils.writeInt(out, valueLength); // Write key out.write(keyOnlyBuffer); // Write value @@ -668,47 +666,25 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { public int write(OutputStream out, boolean withTags) throws IOException { int lenToWrite = KeyValueUtil.length(rowLength, familyLength, qualifierLength, valueLength, tagsLength, withTags); - writeInt(out, lenToWrite); - writeInt(out, keyBuffer.capacity()); - writeInt(out, valueLength); + ByteBufferUtils.writeInt(out, lenToWrite); + ByteBufferUtils.writeInt(out, keyBuffer.capacity()); + ByteBufferUtils.writeInt(out, valueLength); // Write key out.write(keyBuffer.array()); // Write value - writeByteBuffer(out, this.valueBuffer, this.valueOffset, this.valueLength); + ByteBufferUtils.writeByteBuffer(out, this.valueBuffer, this.valueOffset, this.valueLength); if (withTags) { // 2 bytes tags length followed by tags bytes // tags length is serialized with 2 bytes only(short way) even if the type is int. 
// As this is non -ve numbers, we save the sign bit. See HBASE-11437 out.write((byte) (0xff & (this.tagsLength >> 8))); out.write((byte) (0xff & this.tagsLength)); - writeByteBuffer(out, this.tagsBuffer, this.tagsOffset, this.tagsLength); + ByteBufferUtils.writeByteBuffer(out, this.tagsBuffer, this.tagsOffset, this.tagsLength); } return lenToWrite + Bytes.SIZEOF_INT; } } - private static void writeInt(OutputStream out, int v) throws IOException { - // We have writeInt in ByteBufferOutputStream so that it can directly write int to underlying - // ByteBuffer in one step. - if (out instanceof ByteBufferOutputStream) { - ((ByteBufferOutputStream) out).writeInt(v); - } else { - StreamUtils.writeInt(out, v); - } - } - - private static void writeByteBuffer(OutputStream out, ByteBuffer b, int offset, int length) - throws IOException { - // We have write which takes ByteBuffer in ByteBufferOutputStream so that it can directly write - // bytes from the src ByteBuffer to the destination ByteBuffer. This avoid need for temp array - // creation and copy - if (out instanceof ByteBufferOutputStream) { - ((ByteBufferOutputStream) out).write(b, offset, length); - } else { - ByteBufferUtils.copyBufferToStream(out, b, offset, length); - } - } - protected abstract static class BufferedEncodedSeeker implements EncodedSeeker { @@ -1208,7 +1184,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return new KeyValue.KeyOnlyKeyValue(key.array(), key.arrayOffset() + key.position(), keyLength); } else { - return new OffheapKeyOnlyKeyValue(key, key.position(), keyLength); + return new ByteBufferedKeyOnlyKeyValue(key, key.position(), keyLength); } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index b80b38a..b361eee 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -27,6 +27,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.io.ByteBufferOutputStream; +import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.WritableUtils; @@ -829,4 +831,52 @@ public final class ByteBufferUtils { } } } + + public static void writeInt(OutputStream out, int v) throws IOException { + // We have writeInt in ByteBufferOutputStream so that it can directly write + // int to underlying + // ByteBuffer in one step. + if (out instanceof ByteBufferOutputStream) { + ((ByteBufferOutputStream) out).writeInt(v); + } else { + StreamUtils.writeInt(out, v); + } + } + + public static void writeByteBuffer(OutputStream out, ByteBuffer b, int offset, int length) + throws IOException { + // We have write which takes ByteBuffer in ByteBufferOutputStream so that it + // can directly write + // bytes from the src ByteBuffer to the destination ByteBuffer. 
This avoid + // need for temp array + // creation and copy + if (out instanceof ByteBufferOutputStream) { + ((ByteBufferOutputStream) out).write(b, offset, length); + } else { + ByteBufferUtils.copyBufferToStream(out, b, offset, length); + } + } + // For testing purpose + public static String toStringBinary(final ByteBuffer b, int off, int len) { + StringBuilder result = new StringBuilder(); + // Just in case we are passed a 'len' that is > buffer length... + if (off >= b.capacity()) + return result.toString(); + if (off + len > b.capacity()) + len = b.capacity() - off; + for (int i = off; i < off + len; ++i) { + int ch = b.get(i) & 0xFF; + if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') + || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) { + result.append((char) ch); + } else { + result.append(String.format("\\x%02X", ch)); + } + } + return result.toString(); + } + + public static String toStringBinary(final ByteBuffer b) { + return toStringBinary(b, 0, b.capacity()); + } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java new file mode 100644 index 0000000..3e8d327 --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java @@ -0,0 +1,205 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.hadoop.hbase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category({ MiscTests.class, SmallTests.class }) +public class TestOffheapKeyValue { + private static final String QUAL2 = "qual2"; + private static final String FAM2 = "fam2"; + private static final String QUAL1 = "qual1"; + private static final String FAM1 = "fam1"; + private static final String ROW1 = "row1"; + private static final byte[] row1 = Bytes.toBytes(ROW1); + private static final byte[] fam1 = Bytes.toBytes(FAM1); + private static final byte[] fam2 = Bytes.toBytes(FAM2); + private static final byte[] qual1 = Bytes.toBytes(QUAL1); + private static final byte[] qual2 = Bytes.toBytes(QUAL2); + private static final Tag t1 = new Tag((byte) 1, Bytes.toBytes("TAG1")); + private static final Tag t2 = new Tag((byte) 2, Bytes.toBytes("TAG2")); + private static final ArrayList tags = new ArrayList(); + static { + tags.add(t1); + tags.add(t2); + } + + @Test + public void testByteBufferBackedKeyValue() throws Exception { + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1); + ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); + ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); + ByteBufferedCell offheapKV = new OffheapKeyValue(buf, 0, buf.capacity(), false); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(), + offheapKV.getRowPositionInByteBuffer(), offheapKV.getRowLength())); + assertEquals( + FAM1, + ByteBufferUtils.toStringBinary(offheapKV.getFamilyByteBuffer(), + offheapKV.getFamilyPositionInByteBuffer(), offheapKV.getFamilyLength())); + assertEquals( + QUAL1, + ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(), + offheapKV.getQualifierPositionInByteBuffer(), offheapKV.getQualifierLength())); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), + offheapKV.getValuePositionInByteBuffer(), offheapKV.getValueLength())); + assertEquals(0L, offheapKV.getTimestamp()); + assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + + // Use the array() APIs + assertEquals( + ROW1, + Bytes.toStringBinary(offheapKV.getRowArray(), + offheapKV.getRowOffset(), offheapKV.getRowLength())); + assertEquals( + FAM1, + Bytes.toStringBinary(offheapKV.getFamilyArray(), + offheapKV.getFamilyOffset(), offheapKV.getFamilyLength())); + assertEquals( + QUAL1, + Bytes.toStringBinary(offheapKV.getQualifierArray(), + offheapKV.getQualifierOffset(), offheapKV.getQualifierLength())); + assertEquals( + ROW1, + Bytes.toStringBinary(offheapKV.getValueArray(), + offheapKV.getValueOffset(), offheapKV.getValueLength())); + assertEquals(0L, offheapKV.getTimestamp()); + assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + + kvCell = new KeyValue(row1, fam2, qual2, 0l, Type.Put, row1); + buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); + ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); + offheapKV = new OffheapKeyValue(buf, 0, 
buf.capacity(), false); + assertEquals( + FAM2, + ByteBufferUtils.toStringBinary(offheapKV.getFamilyByteBuffer(), + offheapKV.getFamilyPositionInByteBuffer(), offheapKV.getFamilyLength())); + assertEquals( + QUAL2, + ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(), + offheapKV.getQualifierPositionInByteBuffer(), offheapKV.getQualifierLength())); + byte[] nullQualifier = new byte[0]; + kvCell = new KeyValue(row1, fam1, nullQualifier, 0L, Type.Put, row1); + buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); + ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); + offheapKV = new OffheapKeyValue(buf, 0, buf.capacity(), false); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(), + offheapKV.getRowPositionInByteBuffer(), offheapKV.getRowLength())); + assertEquals( + FAM1, + ByteBufferUtils.toStringBinary(offheapKV.getFamilyByteBuffer(), + offheapKV.getFamilyPositionInByteBuffer(), offheapKV.getFamilyLength())); + assertEquals( + "", + ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(), + offheapKV.getQualifierPositionInByteBuffer(), offheapKV.getQualifierLength())); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), + offheapKV.getValuePositionInByteBuffer(), offheapKV.getValueLength())); + assertEquals(0L, offheapKV.getTimestamp()); + assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + } + + @Test + public void testByteBufferBackedKeyValueWithTags() throws Exception { + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1, tags); + ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length); + ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length); + ByteBufferedCell offheapKV = new OffheapKeyValue(buf, 0, buf.capacity(), true); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(), + offheapKV.getRowPositionInByteBuffer(), offheapKV.getRowLength())); + assertEquals( + FAM1, + ByteBufferUtils.toStringBinary(offheapKV.getFamilyByteBuffer(), + offheapKV.getFamilyPositionInByteBuffer(), offheapKV.getFamilyLength())); + assertEquals( + QUAL1, + ByteBufferUtils.toStringBinary(offheapKV.getQualifierByteBuffer(), + offheapKV.getQualifierPositionInByteBuffer(), offheapKV.getQualifierLength())); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKV.getValueByteBuffer(), + offheapKV.getValuePositionInByteBuffer(), offheapKV.getValueLength())); + assertEquals(0L, offheapKV.getTimestamp()); + assertEquals(Type.Put.getCode(), offheapKV.getTypeByte()); + // change tags to handle both onheap and offheap stuff + List resTags = + Tag.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), offheapKV.getTagsLength()); + Tag tag1 = resTags.get(0); + assertEquals(t1.getType(), tag1.getType()); + assertEquals(Bytes.toString(t1.getValue()), Bytes.toString(getTagValue(tag1))); + Tag tag2 = resTags.get(1); + assertEquals(tag2.getType(), tag2.getType()); + assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2))); + Tag res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 2); + assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2))); + res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 3); + assertNull(res); + } + + @Test + public void testGetKeyMethods() throws Exception { + KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1, 
tags); + ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getKeyLength()); + ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), kvCell.getKeyOffset(), + kvCell.getKeyLength()); + ByteBufferedCell offheapKeyOnlyKV = new ByteBufferedKeyOnlyKeyValue(buf, 0, buf.capacity()); + assertEquals( + ROW1, + ByteBufferUtils.toStringBinary(offheapKeyOnlyKV.getRowByteBuffer(), + offheapKeyOnlyKV.getRowPositionInByteBuffer(), offheapKeyOnlyKV.getRowLength())); + assertEquals( + FAM1, + ByteBufferUtils.toStringBinary(offheapKeyOnlyKV.getFamilyByteBuffer(), + offheapKeyOnlyKV.getFamilyPositionInByteBuffer(), offheapKeyOnlyKV.getFamilyLength())); + assertEquals( + QUAL1, + ByteBufferUtils.toStringBinary(offheapKeyOnlyKV.getQualifierByteBuffer(), + offheapKeyOnlyKV.getQualifierPositionInByteBuffer(), + offheapKeyOnlyKV.getQualifierLength())); + assertEquals(0L, offheapKeyOnlyKV.getTimestamp()); + assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte()); + } + // TODO : Can be moved to TagUtil + private static byte[] getTagValue(Tag tag) { + int tagLength = tag.getTagLength(); + byte[] tagBytes = new byte[tagLength]; + System.arraycopy(tag.getBuffer(), tag.getTagOffset(), tagBytes, 0, tagLength); + return tagBytes; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index 3494145..b1e0a4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -728,7 +729,7 @@ public class HFileBlockIndex { // If we imagine that keys[-1] = -Infinity and // keys[numEntries] = Infinity, then we are maintaining an invariant that // keys[low - 1] < key < keys[high + 1] while narrowing down the range. - KeyValue.KeyOnlyKeyValue nonRootIndexKV = new KeyValue.KeyOnlyKeyValue(); + ByteBufferedKeyOnlyKeyValue nonRootIndexkeyOnlyKV = new ByteBufferedKeyOnlyKeyValue(); Pair pair = new Pair(); while (low <= high) { mid = (low + high) >>> 1; @@ -753,9 +754,8 @@ public class HFileBlockIndex { // done after HBASE-12224 & HBASE-12282 // TODO avoid array call. 
nonRootIndex.asSubByteBuffer(midKeyOffset, midLength, pair); - nonRootIndexKV.setKey(pair.getFirst().array(), - pair.getFirst().arrayOffset() + pair.getSecond(), midLength); - int cmp = comparator.compareKeyIgnoresMvcc(key, nonRootIndexKV); + nonRootIndexkeyOnlyKV.setKey(pair.getFirst(), pair.getSecond(), midLength); + int cmp = comparator.compareKeyIgnoresMvcc(key, nonRootIndexkeyOnlyKV); // key lives above the midpoint if (cmp > 0) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java index 4a11b14..0b6628a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java @@ -34,6 +34,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue; +import org.apache.hadoop.hbase.OffheapKeyValue; import org.apache.hadoop.hbase.ShareableMemory; import org.apache.hadoop.hbase.SizeCachedKeyValue; import org.apache.hadoop.hbase.HConstants; @@ -52,6 +54,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.IdLock; import org.apache.hadoop.hbase.util.Pair; @@ -464,7 +467,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { protected volatile int blockFetches; protected final HFile.Reader reader; private int currTagsLen; - private KeyValue.KeyOnlyKeyValue keyOnlyKv = new KeyValue.KeyOnlyKeyValue(); + // buffer backed keyonlyKV + private ByteBufferedKeyOnlyKeyValue bufBackedKeyOnlyKv = new ByteBufferedKeyOnlyKeyValue(); // A pair for reusing in blockSeek() so that we don't garbage lot of objects final Pair pair = new Pair(); @@ -675,10 +679,8 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { } offsetFromPos += Bytes.SIZEOF_LONG; blockBuffer.asSubByteBuffer(blockBuffer.position() + offsetFromPos, klen, pair); - // TODO :change here after Bufferbackedcells come - keyOnlyKv.setKey(pair.getFirst().array(), pair.getFirst().arrayOffset() + pair.getSecond(), - klen); - int comp = reader.getComparator().compareKeyIgnoresMvcc(key, keyOnlyKv); + bufBackedKeyOnlyKv.setKey(pair.getFirst(), pair.getSecond(), klen); + int comp = reader.getComparator().compareKeyIgnoresMvcc(key, bufBackedKeyOnlyKv); offsetFromPos += klen + vlen; if (this.reader.getFileContext().isIncludesTags()) { // Read short as unsigned, high byte first @@ -888,40 +890,62 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { if (!isSeeked()) return null; - KeyValue ret; // TODO : reduce the varieties of KV here. Check if based on a boolean - // we can handle the 'no tags' case - // TODO : Handle MBB here - if (currTagsLen > 0) { + // we can handle the 'no tags' case. 
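+      // Blocks without a backing array (off-heap) yield buffer-backed
+      // OffheapKeyValue cells; array-backed blocks keep the on-heap variants.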
+ Cell ret; + int cellBufSize = getCellBufSize(); + if (!blockBuffer.hasArray()) { + ByteBuffer buf = blockBuffer.asSubByteBuffer(cellBufSize); if (this.curBlock.getMemoryType() == MemoryType.SHARED) { - ret = new ShareableMemoryKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position(), getCellBufSize()); + ret = new ShareableMemoryOffheapKeyValue(buf, buf.position(), cellBufSize, + currTagsLen > 0); } else { - ret = new SizeCachedKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position(), getCellBufSize()); + ret = new OffheapKeyValue(buf, buf.position(), cellBufSize, currTagsLen > 0); + } + if (this.reader.shouldIncludeMemstoreTS()) { + ((OffheapKeyValue) ret).setSequenceId(currMemstoreTS); } } else { - if (this.curBlock.getMemoryType() == MemoryType.SHARED) { - ret = new ShareableMemoryNoTagsKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position(), getCellBufSize()); + if(currTagsLen > 0) { + if (this.curBlock.getMemoryType() == MemoryType.SHARED) { + ret = new ShareableMemoryKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() + + blockBuffer.position(), getCellBufSize()); + } else { + ret = new SizeCachedKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() + + blockBuffer.position(), cellBufSize); + } + if (this.reader.shouldIncludeMemstoreTS()) { + ((SizeCachedKeyValue) ret).setSequenceId(currMemstoreTS); + } } else { - ret = new SizeCachedNoTagsKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position(), getCellBufSize()); + if (this.curBlock.getMemoryType() == MemoryType.SHARED) { + ret = new ShareableMemoryNoTagsKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() + + blockBuffer.position(), getCellBufSize()); + } else { + ret = new SizeCachedNoTagsKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() + + blockBuffer.position(), cellBufSize); + } + if (this.reader.shouldIncludeMemstoreTS()) { + ((SizeCachedNoTagsKeyValue) ret).setSequenceId(currMemstoreTS); + } } } - - if (this.reader.shouldIncludeMemstoreTS()) { - ret.setSequenceId(currMemstoreTS); - } return ret; } @Override public Cell getKey() { assertSeeked(); - return new KeyValue.KeyOnlyKeyValue(blockBuffer.array(), - blockBuffer.arrayOffset() + blockBuffer.position() - + KEY_VALUE_LEN_SIZE, currKeyLen); + // Create a new object so that this getKey is cached as firstKey, lastKey + Pair keyPair = new Pair(); + blockBuffer.asSubByteBuffer(blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen, keyPair); + ByteBuffer keyBuf = keyPair.getFirst(); + if (keyBuf.hasArray()) { + return new KeyValue.KeyOnlyKeyValue(keyBuf.array(), keyBuf.arrayOffset() + + keyPair.getSecond(), currKeyLen); + } else { + return new ByteBufferedKeyOnlyKeyValue(keyBuf, keyPair.getSecond(), currKeyLen); + } } private static class ShareableMemoryKeyValue extends SizeCachedKeyValue implements @@ -950,14 +974,32 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { } } + private static class ShareableMemoryOffheapKeyValue extends OffheapKeyValue implements + ShareableMemory { + public ShareableMemoryOffheapKeyValue(ByteBuffer buf, int offset, int length, + boolean hasTags) { + super(buf, offset, length, hasTags); + } + + @Override + public Cell cloneToCell() { + byte[] copy = new byte[this.length]; + ByteBufferUtils.copyFromBufferToArray(copy, this.buf, this.offset, 0, this.length); + return new SizeCachedKeyValue(copy, 0, copy.length); + } + } + @Override public ByteBuffer getValue() { assertSeeked(); - // TODO : change here after 
BufferBacked cells come - return ByteBuffer.wrap( - blockBuffer.array(), - blockBuffer.arrayOffset() + blockBuffer.position() - + KEY_VALUE_LEN_SIZE + currKeyLen, currValueLen).slice(); + // Okie to create new Pair. Not used in hot path + Pair valuePair = new Pair(); + this.blockBuffer.asSubByteBuffer(blockBuffer.position() + KEY_VALUE_LEN_SIZE + currKeyLen, + currValueLen, valuePair); + ByteBuffer valBuf = valuePair.getFirst().duplicate(); + valBuf.position(valuePair.getSecond()); + valBuf.limit(currValueLen + valuePair.getSecond()); + return valBuf.slice(); } protected void setNonSeekedState() { @@ -1152,31 +1194,28 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { buffer.skip(Bytes.SIZEOF_INT);// Skip value len part ByteBuffer keyBuff = buffer.asSubByteBuffer(klen); keyBuff.limit(keyBuff.position() + klen); - // Create a KeyOnlyKv now. - // TODO : Will change when Buffer backed cells come - return new KeyValue.KeyOnlyKeyValue(keyBuff.array(), keyBuff.arrayOffset() - + keyBuff.position(), klen); + if (keyBuff.hasArray()) { + return new KeyValue.KeyOnlyKeyValue(keyBuff.array(), keyBuff.arrayOffset() + + keyBuff.position(), klen); + } else { + return new ByteBufferedKeyOnlyKeyValue(keyBuff, keyBuff.position(), klen); + } } @Override public String getKeyString() { - return Bytes.toStringBinary(blockBuffer.array(), - blockBuffer.arrayOffset() + blockBuffer.position() - + KEY_VALUE_LEN_SIZE, currKeyLen); + return CellUtil.toString(getKey(), false); } @Override public String getValueString() { - return Bytes.toString(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position() + KEY_VALUE_LEN_SIZE + currKeyLen, - currValueLen); + return ByteBufferUtils.toStringBinary(getValue()); } public int compareKey(CellComparator comparator, Cell key) { - this.keyOnlyKv.setKey(blockBuffer.array(), blockBuffer.arrayOffset() - + blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen); - return comparator.compareKeyIgnoresMvcc( - key, this.keyOnlyKv); + blockBuffer.asSubByteBuffer(blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen, pair); + this.bufBackedKeyOnlyKv.setKey(pair.getFirst(), pair.getSecond(), currKeyLen); + return comparator.compareKeyIgnoresMvcc(key, this.bufBackedKeyOnlyKv); } @Override @@ -1534,7 +1573,6 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { private final HFileBlockDecodingContext decodingCtx; private final DataBlockEncoder.EncodedSeeker seeker; private final DataBlockEncoder dataBlockEncoder; - private final HFileContext meta; public EncodedScanner(HFile.Reader reader, boolean cacheBlocks, boolean pread, boolean isCompaction, HFileContext meta) { @@ -1544,7 +1582,6 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { decodingCtx = dataBlockEncoder.newDataBlockDecodingContext(meta); seeker = dataBlockEncoder.createSeeker( reader.getComparator(), decodingCtx); - this.meta = meta; } @Override @@ -1644,8 +1681,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { @Override public String getValueString() { ByteBuffer valueBuffer = getValue(); - return Bytes.toStringBinary(valueBuffer.array(), - valueBuffer.arrayOffset(), valueBuffer.limit()); + return ByteBufferUtils.toStringBinary(valueBuffer); } private void assertValidSeek() { @@ -1708,22 +1744,6 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { return true; // We load file info in constructor in version 2. } - /** - * Validates that the minor version is within acceptable limits. 
- * Otherwise throws an Runtime exception - */ - private void validateMinorVersion(Path path, int minorVersion) { - if (minorVersion < MIN_MINOR_VERSION || - minorVersion > MAX_MINOR_VERSION) { - String msg = "Minor version for path " + path + - " is expected to be between " + - MIN_MINOR_VERSION + " and " + MAX_MINOR_VERSION + - " but is found to be " + minorVersion; - LOG.error(msg); - throw new RuntimeException(msg); - } - } - @Override public HFileContext getFileContext() { return hfileContext; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java index 092234b..7e0badb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java @@ -24,7 +24,6 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType; import org.apache.hadoop.hbase.nio.ByteBuff; -import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.util.ByteBufferArray; import org.apache.hadoop.hbase.util.Pair; @@ -69,22 +68,19 @@ public class ByteBufferIOEngine implements IOEngine { @Override public Pair read(long offset, int length) throws IOException { - // TODO : this allocate and copy will go away once we create BB backed cells - ByteBuffer dstBuffer = ByteBuffer.allocate(length); - bufferArray.getMultiple(offset, dstBuffer.remaining(), dstBuffer.array(), - dstBuffer.arrayOffset()); + ByteBuff dstBuffer = bufferArray.asSubByteBuff(offset, length); // Here the buffer that is created directly refers to the buffer in the actual buckets. // When any cell is referring to the blocks created out of these buckets then it means that // those cells are referring to a shared memory area which if evicted by the BucketCache would // lead to corruption of results. Hence we set the type of the buffer as SHARED_MEMORY // so that the readers using this block are aware of this fact and do the necessary action // to prevent eviction till the results are either consumed or copied + // TODO : check this once if (dstBuffer.limit() != length) { throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " + length + " expected"); } - // TODO : to be removed - make it conditional - return new Pair(new SingleByteBuff(dstBuffer), MemoryType.SHARED); + return new Pair(dstBuffer, MemoryType.SHARED); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java index 41669fd..3c5cb6d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java @@ -107,10 +107,6 @@ public class TestByteBufferIOEngine { offset = (int) (Math.random() * (capacity - maxBlockSize)); } ioEngine.write(srcBuffer, offset); - //ByteBuffer dstBuffer = ByteBuffer.allocate(blockSize); - //ioEngine.read(dstBuffer, offset); - //MultiByteBuffer read = new MultiByteBuffer(dstBuffer); - // TODO : this will get changed after HBASE-12295 goes in Pair read = ioEngine.read(offset, blockSize); for (int j = 0; j < byteArray.length; ++j) { assertTrue(srcBuffer.get(j) == read.getFirst().get(j));
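
For reference, the access pattern the buffer-backed cells in this patch rely on is absolute-offset ByteBuffer reads over the standard KeyValue key layout. The following standalone sketch is not part of the patch; the class name, the slice helper, and the hard-coded Put type code 4 are illustrative assumptions rather than HBase API. It walks a serialized key the same way ByteBufferedKeyOnlyKeyValue does:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

/**
 * Illustrative only (not HBase API): walks a serialized HBase key
 * -- <2B rowLen><row><1B famLen><family><qualifier><8B ts><1B type> --
 * using absolute ByteBuffer reads, the same access pattern the
 * buffer-backed cells in this patch use.
 */
public class KeyWalker {
  private static final int TIMESTAMP_TYPE_SIZE = 8 + 1; // 8-byte ts + 1-byte type

  public static void main(String[] args) {
    byte[] row = "row1".getBytes(StandardCharsets.UTF_8);
    byte[] fam = "fam1".getBytes(StandardCharsets.UTF_8);
    byte[] qual = "qual1".getBytes(StandardCharsets.UTF_8);
    int keyLen = 2 + row.length + 1 + fam.length + qual.length + TIMESTAMP_TYPE_SIZE;
    ByteBuffer buf = ByteBuffer.allocateDirect(keyLen);
    buf.putShort((short) row.length).put(row)
        .put((byte) fam.length).put(fam).put(qual)
        .putLong(0L)    // timestamp
        .put((byte) 4); // assumed: 4 is KeyValue.Type.Put's code
    // Absolute reads only; buf.position() is never consulted or changed.
    int offset = 0;
    short rowLen = buf.getShort(offset);
    int famLenPos = offset + 2 + rowLen;      // cf. getFamilyLengthPosition()
    byte famLen = buf.get(famLenPos);
    int qualPos = famLenPos + 1 + famLen;
    int qualLen = keyLen - (2 + rowLen + 1 + famLen + TIMESTAMP_TYPE_SIZE);
    long ts = buf.getLong(offset + keyLen - TIMESTAMP_TYPE_SIZE); // cf. getTimestampOffset()
    byte type = buf.get(offset + keyLen - 1);
    System.out.println("row=" + slice(buf, offset + 2, rowLen)
        + " family=" + slice(buf, famLenPos + 1, famLen)
        + " qualifier=" + slice(buf, qualPos, qualLen)
        + " ts=" + ts + " type=" + type);
  }

  // Copy out a range with absolute gets so the buffer state stays untouched.
  private static String slice(ByteBuffer b, int pos, int len) {
    byte[] out = new byte[len];
    for (int i = 0; i < len; i++) {
      out[i] = b.get(pos + i);
    }
    return new String(out, StandardCharsets.UTF_8);
  }
}

Running it prints row=row1 family=fam1 qualifier=qual1 ts=0 type=4, which matches the offset arithmetic used by getFamilyLengthPosition() and getTimestampOffset() in the patch above.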