Index: src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java	(revision 1296737)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java	(working copy)
@@ -353,6 +353,7 @@
         case INCLUDE:
         case INCLUDE_AND_SEEK_NEXT_ROW:
         case INCLUDE_AND_SEEK_NEXT_COL:
+        case INCLUDE_AND_NEXT_USING_HINT:
 
           Filter f = matcher.getFilter();
           results.add(f == null ? kv : f.transform(kv));
@@ -365,6 +366,13 @@
             reseek(matcher.getKeyForNextRow(kv));
           } else if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL) {
             reseek(matcher.getKeyForNextColumn(kv));
+          } else if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_NEXT_USING_HINT) {
+            KeyValue nextKV = matcher.getNextKeyHint(kv);
+            if (nextKV != null) {
+              reseek(nextKV);
+            } else {
+              heap.next();
+            }
           } else {
             this.heap.next();
           }
Index: src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java	(revision 1296737)
+++ src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java	(working copy)
@@ -338,6 +338,7 @@
      * counter for even that KV which may be discarded later on by Filter. This
      * would lead to incorrect results in certain cases.
      */
+    boolean filterSeek = false;
     if (filter != null) {
       ReturnCode filterResponse = filter.filterKeyValue(kv);
       if (filterResponse == ReturnCode.SKIP) {
@@ -349,6 +350,8 @@
         return MatchCode.SEEK_NEXT_ROW;
       } else if (filterResponse == ReturnCode.SEEK_NEXT_USING_HINT) {
         return MatchCode.SEEK_NEXT_USING_HINT;
+      } else if (filterResponse == ReturnCode.INCLUDE_AND_SEEK_NEXT_USING_HINT) {
+        filterSeek = true;
       }
     }
 
@@ -359,11 +362,20 @@
      * SEEK_NEXT_COL, SEEK_NEXT_ROW, SKIP or INCLUDE. Therefore, always return
      * the MatchCode. If it is SEEK_NEXT_ROW, also set stickyNextRow.
      */
+    if (filterSeek) {
+      switch(colChecker) {
+      case INCLUDE:
+      case INCLUDE_AND_NEXT_USING_HINT:
+      case INCLUDE_AND_SEEK_NEXT_COL:
+      case INCLUDE_AND_SEEK_NEXT_ROW:
+        // assuming seek using hint will skip the most KVs
+        return MatchCode.INCLUDE_AND_NEXT_USING_HINT;
+      }
+    }
     if (colChecker == MatchCode.SEEK_NEXT_ROW) {
       stickyNextRow = true;
     }
     return colChecker;
-
   }
 
   public boolean moreRowsMayExistAfter(KeyValue kv) {
@@ -500,5 +512,10 @@
      * Include KeyValue and done with row, seek to next.
      */
     INCLUDE_AND_SEEK_NEXT_ROW,
+
+    /**
+     * Include KeyValue and seek to the next key, which is given as a hint.
+     */
+    INCLUDE_AND_NEXT_USING_HINT,
   }
 }
Index: src/main/java/org/apache/hadoop/hbase/filter/Filter.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/filter/Filter.java	(revision 1296737)
+++ src/main/java/org/apache/hadoop/hbase/filter/Filter.java	(working copy)
@@ -133,6 +133,11 @@
      * Seek to next key which is given as hint by the filter.
      */
     SEEK_NEXT_USING_HINT,
+    /**
+     * Include the KeyValue and seek to next key which is given as hint by the
+     * filter.
+     */
+    INCLUDE_AND_SEEK_NEXT_USING_HINT,
   }
 
   /**
Index: src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(revision 1296737)
+++ src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java	(working copy)
@@ -1517,6 +1517,14 @@
     }
   }
 
+  public void testIncludeAndNextRowUsingHint() throws Exception {
+    CustomIncludeAndNextUsingHintFilter filter = new CustomIncludeAndNextUsingHintFilter(
+        new byte[][] { Bytes.toBytes("testRowTwo-0"), Bytes.toBytes("testRowTwo-2") });
+    Scan s = new Scan();
+    s.setFilter(filter);
+    verifyScan(s, 2, 1);
+  }
+
   @org.junit.Rule
   public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
     new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
Index: src/test/java/org/apache/hadoop/hbase/filter/CustomIncludeAndNextUsingHintFilter.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/filter/CustomIncludeAndNextUsingHintFilter.java	(revision 0)
+++ src/test/java/org/apache/hadoop/hbase/filter/CustomIncludeAndNextUsingHintFilter.java	(revision 0)
@@ -0,0 +1,59 @@
+package org.apache.hadoop.hbase.filter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class CustomIncludeAndNextUsingHintFilter extends FilterBase {
+  private byte[][] seekHints;
+  transient private int count = 0;
+
+  public CustomIncludeAndNextUsingHintFilter(byte[][] hints) {
+    this.seekHints = hints;
+  }
+
+  @Override
+  public ReturnCode filterKeyValue(KeyValue kv) {
+    int res = Bytes.compareTo(kv.getRow(), seekHints[count]);
+    if (res < 0) {
+      return ReturnCode.SEEK_NEXT_USING_HINT;
+    } else if (res == 0) {
+      count++;
+      return ReturnCode.INCLUDE_AND_SEEK_NEXT_USING_HINT;
+    } else {
+      throw new RuntimeException("encountered previous KV");
+    }
+  }
+
+  @Override
+  public boolean filterAllRemaining() {
+    return count >= seekHints.length;
+  }
+
+  @Override
+  public KeyValue getNextKeyHint(KeyValue kv) {
+    if (count >= seekHints.length)
+      return null;
+    return KeyValue.createFirstOnRow(seekHints[count]);
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    out.writeInt(seekHints.length);
+    for (byte [] hint : seekHints) {
+      Bytes.writeByteArray(out, hint);
+    }
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    int count = in.readInt();
+    this.seekHints = new byte[count][];
+    for (int i=0; i<count; i++) {
+      this.seekHints[i] = Bytes.readByteArray(in);
+    }
+  }
+}
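Below is a minimal client-side sketch of how a filter returning the new INCLUDE_AND_SEEK_NEXT_USING_HINT code could be driven from an ordinary scan. It is not part of the patch: the table name "t1", the two hinted rows, and the reuse of the test-only CustomIncludeAndNextUsingHintFilter are illustrative assumptions. It further assumes both hinted rows exist (the test filter throws once it sees a row past the current hint) and that the filter class is available on the region server classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CustomIncludeAndNextUsingHintFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class IncludeAndSeekHintScanExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // "t1" and the hinted rows below are illustrative assumptions.
    HTable table = new HTable(conf, "t1");
    try {
      // Each hinted row is included and the scanner then reseeks straight to
      // the next hint, so the KVs between the hints are skipped.
      CustomIncludeAndNextUsingHintFilter filter = new CustomIncludeAndNextUsingHintFilter(
          new byte[][] { Bytes.toBytes("row-0100"), Bytes.toBytes("row-0200") });
      Scan scan = new Scan();
      scan.setFilter(filter);
      ResultScanner scanner = table.getScanner(scan);
      try {
        for (Result result : scanner) {
          System.out.println("Included row: " + Bytes.toString(result.getRow()));
        }
      } finally {
        scanner.close();
      }
    } finally {
      table.close();
    }
  }
}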