diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index d77f2df..6024aa5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -331,7 +331,6 @@ public class ClientScanner extends AbstractClientScanner {
     }
     if (cache.size() == 0) {
       Result [] values = null;
-      long remainingResultSize = maxScannerResultSize;
       int countdown = this.caching;
       // We need to reset it if it's a new callable that was created
       // with a countdown in nextScanner
@@ -428,16 +427,12 @@ public class ClientScanner extends AbstractClientScanner {
         if (values != null && values.length > 0) {
           for (Result rs : values) {
             cache.add(rs);
-            for (Cell kv : rs.rawCells()) {
-              // TODO make method in Cell or CellUtil
-              remainingResultSize -= KeyValueUtil.ensureKeyValue(kv).heapSize();
-            }
             countdown--;
             this.lastResult = rs;
           }
         }
         // Values == null means server-side filter has determined we must STOP
-      } while (remainingResultSize > 0 && countdown > 0 && nextScanner(countdown, values == null));
+      } while (cache.size() == 0 && countdown > 0 && nextScanner(countdown, values == null));
     }
 
     if (cache.size() > 0) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultSize.java
new file mode 100644
index 0000000..16f800c
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMaxResultSize.java
@@ -0,0 +1,66 @@
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import static org.junit.Assert.assertEquals;
+
+@Category(LargeTests.class)
+public class TestMaxResultSize {
+  protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    // set max result size to 1 byte
+    conf.setLong(HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY, 1);
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testMaxResultSize() throws Exception {
+    byte [] ROW1 = Bytes.toBytes("testRow1");
+    byte [] ROW2 = Bytes.toBytes("testRow2");
+    byte [] FAMILY = Bytes.toBytes("testFamily");
+    byte [] QUALIFIER = Bytes.toBytes("testQualifier");
+    byte [] VALUE = Bytes.toBytes("testValue");
+
+    TableName TABLE = TableName.valueOf("testMaxResultSize");
+    byte[][] FAMILIES = new byte[][] { FAMILY };
+    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
+    conf.setLong(HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY, Long.MAX_VALUE);
+    HTable table = TEST_UTIL.createTable(TABLE, FAMILIES, conf);
+
+    Put p = new Put(ROW1);
+    p.add(FAMILY, QUALIFIER, VALUE);
+    table.put(p);
+    p = new Put(ROW2);
+    p.add(FAMILY, QUALIFIER, VALUE);
+    table.put(p);
+
+    Scan s = new Scan();
+    ResultScanner rs = table.getScanner(s);
+    int count = 0;
+    while(rs.next() != null) {
+      count++;
+    }
+    assertEquals(2, count);
+    rs.close();
+    table.close();
+  }
+}