diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java index a6f45a2..62987cc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -92,6 +92,7 @@ public class Scan extends OperationWithAttributes { private int storeLimit = -1; private int storeOffset = 0; + private boolean getScan; // If application wants to collect scan metrics, it needs to // call scan.setAttribute(SCAN_ATTRIBUTES_ENABLE, Bytes.toBytes(Boolean.TRUE)) @@ -141,6 +142,9 @@ public class Scan extends OperationWithAttributes { public Scan(byte [] startRow, byte [] stopRow) { this.startRow = startRow; this.stopRow = stopRow; + // If both startRow and stopRow are empty, this is a full scan, not a Get + this.getScan = this.startRow != null && this.startRow.length > 0 && + Bytes.equals(this.startRow, this.stopRow); } /** @@ -159,6 +163,7 @@ public class Scan extends OperationWithAttributes { caching = scan.getCaching(); maxResultSize = scan.getMaxResultSize(); cacheBlocks = scan.getCacheBlocks(); + getScan = scan.isGetScan(); filter = scan.getFilter(); // clone? 
loadColumnFamiliesOnDemand = scan.getLoadColumnFamiliesOnDemandValue(); TimeRange ctr = scan.getTimeRange(); @@ -194,11 +199,12 @@ public class Scan extends OperationWithAttributes { this.storeOffset = get.getRowOffsetPerColumnFamily(); this.tr = get.getTimeRange(); this.familyMap = get.getFamilyMap(); + this.getScan = true; } public boolean isGetScan() { - return this.startRow != null && this.startRow.length > 0 && - Bytes.equals(this.startRow, this.stopRow); + return this.getScan || (this.startRow != null && this.startRow.length > 0 && + Bytes.equals(this.startRow, this.stopRow)); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 4533e20..cb72a2b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -3455,7 +3455,7 @@ public class HRegion implements HeapSize { // , Writable{ } this.batch = scan.getBatch(); - if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) { + if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW) && !scan.isGetScan()) { this.stopRow = null; } else { this.stopRow = scan.getStopRow(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetEmptyRowKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetEmptyRowKey.java new file mode 100644 index 0000000..fbd56eb --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetEmptyRowKey.java @@ -0,0 +1,88 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver; + +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.apache.hadoop.hbase.MediumTests; + +@Category(MediumTests.class) +public class TestGetEmptyRowKey { + private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); + private final static byte[] FAMILY = Bytes.toBytes("f1"); + private final static byte[] COL_QUAL = Bytes.toBytes("f1"); + private final static byte[] VAL_BYTES = Bytes.toBytes("v1"); + private final static byte[] ROW_BYTES = Bytes.toBytes("r1"); + @BeforeClass + public static void beforeClass() throws Exception { + UTIL.startMiniCluster(); + } + + @AfterClass + public static void afterClass() throws Exception { + UTIL.shutdownMiniCluster(); + } + + @Test + public void test() throws Exception { + //Create a table and put in 1 
row + HBaseAdmin admin = UTIL.getHBaseAdmin(); + HTableDescriptor desc = new HTableDescriptor(Bytes.toBytes("test")); + desc.addFamily(new HColumnDescriptor(FAMILY)); + admin.createTable(desc); + HTable table = new HTable(UTIL.getConfiguration(), "test"); + + Put put = new Put(ROW_BYTES); + put.add(FAMILY, COL_QUAL, VAL_BYTES); + table.put(put); + table.flushCommits(); + + //Try getting the row with an empty row key + Result res = table.get(new Get(new byte[0])); + assertTrue(res.isEmpty() == true); + res = table.get(new Get(Bytes.toBytes("r1-not-exist"))); + assertTrue(res.isEmpty() == true); + res = table.get(new Get(ROW_BYTES)); + assertTrue(Arrays.equals(res.getValue(FAMILY, COL_QUAL), VAL_BYTES)); + + //Now actually put in a row with an empty row key + put = new Put(new byte[0]); + put.add(FAMILY, COL_QUAL, VAL_BYTES); + table.put(put); + table.flushCommits(); + res = table.get(new Get(new byte[0])); + assertTrue(Arrays.equals(res.getValue(FAMILY, COL_QUAL), VAL_BYTES)); + } +}