Index: src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java (working copy)
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.RawComparator;
 
 /**
  * A facade for a {@link org.apache.hadoop.hbase.io.hfile.HFile.Reader} that serves up
@@ -280,6 +281,11 @@
       public boolean isSeeked() {
         return this.delegate.isSeeked();
       }
+
+      @Override
+      public int compareKey(RawComparator comparator, byte[] key, int offset, int length) throws IOException {
+        return delegate.compareKey(comparator, key, offset, length);
+      }
     };
   }
 
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java (working copy)
@@ -125,6 +125,12 @@
     }
 
     @Override
+    public int compareKey(RawComparator comparator, byte[] key, int offset, int length)
+        throws IOException {
+      return comparator.compare(key, offset, length, current.keyBuffer, 0, current.keyLength);
+    }
+
+    @Override
     public void setCurrentBuffer(ByteBuffer buffer) {
       currentBuffer = buffer;
       decodeFirst();
Index: src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java (working copy)
@@ -156,5 +156,16 @@
      */
     public int seekToKeyInBlock(byte[] key, int offset, int length,
         boolean seekBefore);
+
+    /**
+     * Compare the given key against the current key.
+     * @param comparator the comparator to use
+     * @param key the key buffer
+     * @param offset offset of the key in the buffer
+     * @param length length of the key
+     * @return a negative integer if the passed key is smaller than the current key, zero if equal, and a positive integer if greater
+     * @throws IOException
+     */
+    public int compareKey(RawComparator comparator, byte[] key, int offset, int length) throws IOException;
   }
 }
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java (working copy)
@@ -339,6 +339,14 @@
     public HFile.Reader getReader() {
       return reader;
     }
+
+    @Override
+    public int compareKey(RawComparator comparator, byte[] key, int offset, int length)
+        throws IOException {
+      ByteBuffer bb = getKey();
+      return comparator.compare(key, offset,
+          length, bb.array(), bb.arrayOffset(), bb.limit());
+    }
   }
 
   /** For testing */
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java (working copy)
@@ -38,6 +38,7 @@
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IdLock;
+import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableUtils;
 
 /**
@@ -514,9 +515,7 @@
     public int reseekTo(byte[] key, int offset, int length) throws IOException {
       int compared;
       if (isSeeked()) {
-        ByteBuffer bb = getKey();
-        compared = reader.getComparator().compare(key, offset,
-            length, bb.array(), bb.arrayOffset(), bb.limit());
+        compared = compareKey(reader.getComparator(), key, offset, length);
         if (compared < 1) {
           // If the required key is less than or equal to current key, then
           // don't do anything.
@@ -647,6 +646,13 @@
     }
 
     @Override
+    public int compareKey(RawComparator comparator, byte[] key, int offset, int length)
+        throws IOException {
+      return comparator.compare(key, offset, length, blockBuffer.array(), blockBuffer.arrayOffset()
+          + blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen);
+    }
+
+    @Override
     public ByteBuffer getValue() {
       assertSeeked();
       return ByteBuffer.wrap(
@@ -1042,6 +1048,12 @@
     }
 
     @Override
+    public int compareKey(RawComparator comparator, byte[] key, int offset, int length)
+        throws IOException {
+      return seeker.compareKey(comparator, key, offset, length);
+    }
+
+    @Override
     public ByteBuffer getValue() {
       assertValidSeek();
       return seeker.getValueShallowCopy();
Index: src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java (revision 1533843)
+++ src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java (working copy)
@@ -23,6 +23,7 @@
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.io.RawComparator;
 
 /**
  * A scanner allows you to position yourself within a HFile and
@@ -143,4 +144,14 @@
    * Otherwise returns false.
    */
   public boolean isSeeked();
-}
\ No newline at end of file
+  /**
+   * Compare the given key against the current key.
+   * @param comparator the comparator to use
+   * @param key the key buffer
+   * @param offset offset of the key in the buffer
+   * @param length length of the key
+   * @return a negative integer if the passed key is smaller than the current key, zero if equal, and a positive integer if greater
+   * @throws IOException
+   */
+  public int compareKey(RawComparator comparator, byte[] key, int offset, int length) throws IOException;
+}
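
For reviewers, a minimal usage sketch of the new HFileScanner.compareKey(...) contract, not part of the patch above. The class and method names (CompareKeyExample, needsRepositioning) are made up for illustration, and KeyValue.KEY_COMPARATOR is assumed to be the file's key comparator; substitute the reader's actual comparator where it differs.

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.io.RawComparator;

public class CompareKeyExample {

  /**
   * Returns true if the requested key sorts after the key the scanner is
   * currently positioned on, i.e. the scanner must be repositioned to reach it.
   */
  static boolean needsRepositioning(HFileScanner scanner, byte[] key) throws IOException {
    if (!scanner.isSeeked()) {
      return true; // not positioned yet; a seek is required regardless
    }
    RawComparator<byte[]> comparator = KeyValue.KEY_COMPARATOR;
    // compareKey() returns a negative value if the passed key is smaller than
    // the current key, zero if equal, and a positive value if greater.
    return scanner.compareKey(comparator, key, 0, key.length) > 0;
  }
}

This mirrors the reseekTo() hunk in HFileReaderV2.java above: the comparison is pushed down to the scanner via compareKey(...), so the current key no longer has to be exposed through getKey() as a ByteBuffer just to be compared.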