Index: hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java (revision 1401545)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java (working copy)
@@ -76,7 +76,7 @@
   private void basicFilterTests(ColumnPaginationFilter filter) throws Exception
   {
     KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1);
-    assertTrue("basicFilter1", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    assertTrue("basicFilter1", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE_AND_NEXT_COL);
   }
 
   /**
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java (revision 1401545)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java (working copy)
@@ -286,6 +286,59 @@
   }
 
   /**
+   * Test filterKeyValue logic.
+   * @throws Exception
+   */
+  public void testFilterKeyValue() throws Exception {
+    Filter includeFilter = new FilterBase() {
+      @Override
+      public Filter.ReturnCode filterKeyValue(KeyValue v) {
+        return Filter.ReturnCode.INCLUDE;
+      }
+    };
+
+    Filter alternateFilter = new FilterBase() {
+      boolean returnInclude = true;
+
+      @Override
+      public Filter.ReturnCode filterKeyValue(KeyValue v) {
+        Filter.ReturnCode returnCode = returnInclude ? Filter.ReturnCode.INCLUDE :
+            Filter.ReturnCode.SKIP;
+        returnInclude = !returnInclude;
+        return returnCode;
+      }
+    };
+
+    Filter alternateIncludeFilter = new FilterBase() {
+      boolean returnIncludeOnly = false;
+
+      @Override
+      public Filter.ReturnCode filterKeyValue(KeyValue v) {
+        Filter.ReturnCode returnCode = returnIncludeOnly ? Filter.ReturnCode.INCLUDE :
+            Filter.ReturnCode.INCLUDE_AND_NEXT_COL;
+        returnIncludeOnly = !returnIncludeOnly;
+        return returnCode;
+      }
+    };
+
+    // Check must pass one filter.
+    FilterList mpOnefilterList = new FilterList(Operator.MUST_PASS_ONE,
+        Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
+    // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
+    assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null));
+    // INCLUDE, SKIP, INCLUDE.
+    assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null));
+
+    // Check must pass all filters.
+    FilterList mpAllfilterList = new FilterList(Operator.MUST_PASS_ALL,
+        Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
+    // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
+    assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null));
+    // INCLUDE, SKIP, INCLUDE.
+    assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null));
+  }
+
+  /**
    * Test pass-thru of hints.
    */
   @Test
Index: hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
===================================================================
--- hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java (revision 1401545)
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java (working copy)
@@ -1493,8 +1493,14 @@
 
   @Test
   public void testColumnPaginationFilter() throws Exception {
+    // Test that the filter skips multiple column versions.
+    Put p = new Put(ROWS_ONE[0]);
+    p.setWriteToWAL(false);
+    p.add(FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]);
+    this.region.put(p);
+    this.region.flushcache();
 
-    // Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
+    // Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
     KeyValue [] expectedKVs = {
       // testRowOne-0
       new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (revision 1401545)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (working copy)
@@ -338,8 +338,9 @@
      * counter for even that KV which may be discarded later on by Filter. This
      * would lead to incorrect results in certain cases.
      */
+    ReturnCode filterResponse = ReturnCode.SKIP;
     if (filter != null) {
-      ReturnCode filterResponse = filter.filterKeyValue(kv);
+      filterResponse = filter.filterKeyValue(kv);
       if (filterResponse == ReturnCode.SKIP) {
         return MatchCode.SKIP;
       } else if (filterResponse == ReturnCode.NEXT_COL) {
@@ -361,6 +362,9 @@
      */
     if (colChecker == MatchCode.SEEK_NEXT_ROW) {
       stickyNextRow = true;
+    } else if (filter != null && colChecker == MatchCode.INCLUDE &&
+        filterResponse == ReturnCode.INCLUDE_AND_NEXT_COL) {
+      return MatchCode.INCLUDE_AND_SEEK_NEXT_COL;
     }
 
     return colChecker;
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (revision 1401545)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (working copy)
@@ -32,7 +32,8 @@
 /**
  * A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset.
  * This filter can be used for row-based indexing, where references to other tables are stored across many columns,
- * in order to efficient lookups and paginated results for end users.
+ * in order to efficient lookups and paginated results for end users. Only the most recent versions are considered
+ * for pagination.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -72,7 +73,8 @@
       return ReturnCode.NEXT_ROW;
     }
 
-    ReturnCode code = count < offset ? ReturnCode.SKIP : ReturnCode.INCLUDE;
+    ReturnCode code = count < offset ? ReturnCode.NEXT_COL :
+        ReturnCode.INCLUDE_AND_NEXT_COL;
     count++;
     return code;
   }
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java (revision 1401545)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java (working copy)
@@ -202,6 +202,9 @@
         }
         ReturnCode code = filter.filterKeyValue(v);
         switch (code) {
+        // Override INCLUDE and continue to evaluate.
+        case INCLUDE_AND_NEXT_COL:
+          rc = ReturnCode.INCLUDE_AND_NEXT_COL;
         case INCLUDE:
           continue;
         default:
@@ -214,7 +217,12 @@
 
         switch (filter.filterKeyValue(v)) {
         case INCLUDE:
-          rc = ReturnCode.INCLUDE;
+          if (rc != ReturnCode.INCLUDE_AND_NEXT_COL) {
+            rc = ReturnCode.INCLUDE;
+          }
+          break;
+        case INCLUDE_AND_NEXT_COL:
+          rc = ReturnCode.INCLUDE_AND_NEXT_COL;
           // must continue here to evaluate all filters
           break;
         case NEXT_ROW:
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java (revision 1401545)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java (working copy)
@@ -116,6 +116,10 @@
      */
     INCLUDE,
     /**
+     * Include the KeyValue and seek to the next column, skipping older versions.
+     */
+    INCLUDE_AND_NEXT_COL,
+    /**
      * Skip this KeyValue
      */
     SKIP,
Index: hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
===================================================================
--- hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java (revision 1401545)
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java (working copy)
@@ -59,7 +59,7 @@
   @Override
   public ReturnCode filterKeyValue(KeyValue v) {
     this.count++;
-    return filterAllRemaining() ? ReturnCode.SKIP: ReturnCode.INCLUDE;
+    return filterAllRemaining() ? ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL;
   }
 
   @Override
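
Note for anyone trying the patch out: the sketch below is a minimal, illustrative client-side use of ColumnPaginationFilter and is not part of the patch. The table name "indexTable", the row key, and the page size are assumptions for the example only. With the new INCLUDE_AND_NEXT_COL / NEXT_COL return codes, only the newest version of each column counts toward the requested page, so a column with several versions no longer consumes more than one slot.

// Illustrative only: page through the columns of one row with ColumnPaginationFilter
// (0.94/0.95-era client API). Assumes a table named "indexTable" already exists.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PaginationExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "indexTable");   // assumed to exist
    try {
      int pageSize = 10;
      int offset = 0;                                // second page would use offset = 10
      Get get = new Get(Bytes.toBytes("user#42"));   // illustrative row key
      get.setFilter(new ColumnPaginationFilter(pageSize, offset));
      Result result = table.get(get);
      // Each returned KeyValue is the newest version of one column in the page.
      for (KeyValue kv : result.raw()) {
        System.out.println(Bytes.toString(kv.getQualifier()));
      }
    } finally {
      table.close();
    }
  }
}

Server side, the patch makes ScanQueryMatcher translate INCLUDE_AND_NEXT_COL into MatchCode.INCLUDE_AND_SEEK_NEXT_COL, so older versions of an included column are seeked past rather than filtered one KeyValue at a time.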