diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
index 5262732a45c..f7814957420 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
@@ -223,27 +223,22 @@ public class MultiRowMutationEndpoint extends MultiRowMutationService implements
       get.setTimeRange(timeRange.getMin(), timeRange.getMax());
     }
 
+    List<Cell> result = region.get(get, false);
     boolean matches = false;
-    try (RegionScanner scanner = region.getScanner(new Scan(get))) {
-      // NOTE: Please don't use HRegion.get() instead,
-      // because it will copy cells to heap. See HBASE-26036
-      List<Cell> result = new ArrayList<>();
-      scanner.next(result);
-      if (filter != null) {
-        if (!result.isEmpty()) {
-          matches = true;
-        }
-      } else {
-        boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0;
-        if (result.isEmpty() && valueIsNull) {
-          matches = true;
-        } else if (result.size() > 0 && result.get(0).getValueLength() == 0 && valueIsNull) {
-          matches = true;
-        } else if (result.size() == 1 && !valueIsNull) {
-          Cell kv = result.get(0);
-          int compareResult = PrivateCellUtil.compareValue(kv, comparator);
-          matches = matches(op, compareResult);
-        }
+    if (filter != null) {
+      if (!result.isEmpty()) {
+        matches = true;
+      }
+    } else {
+      boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0;
+      if (result.isEmpty() && valueIsNull) {
+        matches = true;
+      } else if (result.size() > 0 && result.get(0).getValueLength() == 0 && valueIsNull) {
+        matches = true;
+      } else if (result.size() == 1 && !valueIsNull) {
+        Cell kv = result.get(0);
+        int compareResult = PrivateCellUtil.compareValue(kv, comparator);
+        matches = matches(op, compareResult);
       }
     }
     return matches;
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 1acbf162b09..b1bbf639900 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -3290,23 +3290,18 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
 
   private void updateDeleteLatestVersionTimestamp(Cell cell, Get get, int count, byte[] byteNow)
     throws IOException {
-    try (RegionScanner scanner = getScanner(new Scan(get))) {
-      // NOTE: Please don't use HRegion.get() instead,
-      // because it will copy cells to heap. See HBASE-26036
-      List<Cell> result = new ArrayList<>();
-      scanner.next(result);
-
-      if (result.size() < count) {
-        // Nothing to delete
-        PrivateCellUtil.updateLatestStamp(cell, byteNow);
-        return;
-      }
-      if (result.size() > count) {
-        throw new RuntimeException("Unexpected size: " + result.size());
-      }
-      Cell getCell = result.get(count - 1);
-      PrivateCellUtil.setTimestamp(cell, getCell.getTimestamp());
+    List<Cell> result = get(get, false);
+
+    if (result.size() < count) {
+      // Nothing to delete
+      PrivateCellUtil.updateLatestStamp(cell, byteNow);
+      return;
+    }
+    if (result.size() > count) {
+      throw new RuntimeException("Unexpected size: " + result.size());
     }
+    Cell getCell = result.get(count - 1);
+    PrivateCellUtil.setTimestamp(cell, getCell.getTimestamp());
   }
 
   @Override
@@ -4095,64 +4090,60 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       get.setTimeRange(tr.getMin(), tr.getMax());
     }
 
-    try (RegionScanner scanner = region.getScanner(new Scan(get))) {
-      // NOTE: Please don't use HRegion.get() instead,
-      // because it will copy cells to heap. See HBASE-26036
-      List<Cell> currentValues = new ArrayList<>();
-      scanner.next(currentValues);
-      // Iterate the input columns and update existing values if they were found, otherwise
-      // add new column initialized to the delta amount
-      int currentValuesIndex = 0;
-      for (int i = 0; i < deltas.size(); i++) {
-        Cell delta = deltas.get(i);
-        Cell currentValue = null;
-        if (currentValuesIndex < currentValues.size() && CellUtil
-          .matchingQualifier(currentValues.get(currentValuesIndex), delta)) {
-          currentValue = currentValues.get(currentValuesIndex);
-          if (i < (deltas.size() - 1) && !CellUtil.matchingQualifier(delta, deltas.get(i + 1))) {
-            currentValuesIndex++;
-          }
+    List<Cell> currentValues = region.get(get, false);
+
+    // Iterate the input columns and update existing values if they were found, otherwise
+    // add new column initialized to the delta amount
+    int currentValuesIndex = 0;
+    for (int i = 0; i < deltas.size(); i++) {
+      Cell delta = deltas.get(i);
+      Cell currentValue = null;
+      if (currentValuesIndex < currentValues.size() &&
+        CellUtil.matchingQualifier(currentValues.get(currentValuesIndex), delta)) {
+        currentValue = currentValues.get(currentValuesIndex);
+        if (i < (deltas.size() - 1) && !CellUtil.matchingQualifier(delta, deltas.get(i + 1))) {
+          currentValuesIndex++;
         }
-        // Switch on whether this an increment or an append building the new Cell to apply.
-        Cell newCell;
-        if (mutation instanceof Increment) {
-          long deltaAmount = getLongValue(delta);
-          final long newValue = currentValue == null ? deltaAmount :
-            getLongValue(currentValue) + deltaAmount;
-          newCell = reckonDelta(delta, currentValue, columnFamily, now, mutation,
-            (oldCell) -> Bytes.toBytes(newValue));
-        } else {
-          newCell = reckonDelta(delta, currentValue, columnFamily, now, mutation,
-            (oldCell) -> ByteBuffer.wrap(new byte[delta.getValueLength() +
-              oldCell.getValueLength()])
+      }
+      // Switch on whether this an increment or an append building the new Cell to apply.
+      Cell newCell;
+      if (mutation instanceof Increment) {
+        long deltaAmount = getLongValue(delta);
+        final long newValue = currentValue == null ?
+          deltaAmount : getLongValue(currentValue) + deltaAmount;
+        newCell = reckonDelta(delta, currentValue, columnFamily, now, mutation,
+          (oldCell) -> Bytes.toBytes(newValue));
+      } else {
+        newCell = reckonDelta(delta, currentValue, columnFamily, now, mutation,
+          (oldCell) ->
+            ByteBuffer.wrap(new byte[delta.getValueLength() + oldCell.getValueLength()])
              .put(oldCell.getValueArray(), oldCell.getValueOffset(), oldCell.getValueLength())
              .put(delta.getValueArray(), delta.getValueOffset(), delta.getValueLength())
-              .array());
-        }
-        if (region.maxCellSize > 0) {
-          int newCellSize = PrivateCellUtil.estimatedSerializedSizeOf(newCell);
-          if (newCellSize > region.maxCellSize) {
-            String msg =
-              "Cell with size " + newCellSize + " exceeds limit of " + region.maxCellSize +
-                " bytes in region " + this;
-            LOG.debug(msg);
-            throw new DoNotRetryIOException(msg);
-          }
-        }
-        cellPairs.add(new Pair<>(currentValue, newCell));
-        // Add to results to get returned to the Client. If null, cilent does not want results.
-        if (results != null) {
-          results.add(newCell);
+              .array()
+        );
+      }
+      if (region.maxCellSize > 0) {
+        int newCellSize = PrivateCellUtil.estimatedSerializedSizeOf(newCell);
+        if (newCellSize > region.maxCellSize) {
+          String msg = "Cell with size " + newCellSize + " exceeds limit of " +
+            region.maxCellSize + " bytes in region " + this;
+          LOG.debug(msg);
+          throw new DoNotRetryIOException(msg);
         }
       }
-      // Give coprocessors a chance to update the new cells before apply to WAL or memstore
-      if (region.coprocessorHost != null) {
-        // Here the operation must be increment or append.
-        cellPairs = mutation instanceof Increment ?
-          region.coprocessorHost.postIncrementBeforeWAL(mutation, cellPairs) :
-          region.coprocessorHost.postAppendBeforeWAL(mutation, cellPairs);
+      cellPairs.add(new Pair<>(currentValue, newCell));
+      // Add to results to get returned to the Client. If null, cilent does not want results.
+      if (results != null) {
+        results.add(newCell);
       }
     }
+    // Give coprocessors a chance to update the new cells before apply to WAL or memstore
+    if (region.coprocessorHost != null) {
+      // Here the operation must be increment or append.
+      cellPairs = mutation instanceof Increment ?
+        region.coprocessorHost.postIncrementBeforeWAL(mutation, cellPairs) :
+        region.coprocessorHost.postAppendBeforeWAL(mutation, cellPairs);
+    }
     return cellPairs.stream().map(Pair::getSecond).collect(Collectors.toList());
   }
 
@@ -4921,32 +4912,26 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       // NOTE: We used to wait here until mvcc caught up: mvcc.await();
       // Supposition is that now all changes are done under row locks, then when we go to read,
       // we'll get the latest on this row.
+      List<Cell> result = get(get, false);
       boolean matches = false;
       long cellTs = 0;
-      try (RegionScanner scanner = getScanner(new Scan(get))) {
-        // NOTE: Please don't use HRegion.get() instead,
-        // because it will copy cells to heap. See HBASE-26036
-        List<Cell> result = new ArrayList<>(1);
-        scanner.next(result);
-        if (filter != null) {
-          if (!result.isEmpty()) {
-            matches = true;
-            cellTs = result.get(0).getTimestamp();
-          }
-        } else {
-          boolean valueIsNull =
-            comparator.getValue() == null || comparator.getValue().length == 0;
-          if (result.isEmpty() && valueIsNull) {
-            matches = op != CompareOperator.NOT_EQUAL;
-          } else if (result.size() > 0 && valueIsNull) {
-            matches = (result.get(0).getValueLength() == 0) == (op != CompareOperator.NOT_EQUAL);
-            cellTs = result.get(0).getTimestamp();
-          } else if (result.size() == 1) {
-            Cell kv = result.get(0);
-            cellTs = kv.getTimestamp();
-            int compareResult = PrivateCellUtil.compareValue(kv, comparator);
-            matches = matches(op, compareResult);
-          }
+      if (filter != null) {
+        if (!result.isEmpty()) {
+          matches = true;
+          cellTs = result.get(0).getTimestamp();
+        }
+      } else {
+        boolean valueIsNull = comparator.getValue() == null || comparator.getValue().length == 0;
+        if (result.isEmpty() && valueIsNull) {
+          matches = true;
+        } else if (result.size() > 0 && result.get(0).getValueLength() == 0 && valueIsNull) {
+          matches = true;
+          cellTs = result.get(0).getTimestamp();
+        } else if (result.size() == 1 && !valueIsNull) {
+          Cell kv = result.get(0);
+          cellTs = kv.getTimestamp();
+          int compareResult = PrivateCellUtil.compareValue(kv, comparator);
+          matches = matches(op, compareResult);
        }
      }
 
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index 66dd862b663..630e5db8295 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -397,24 +397,20 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
     }
     get.setFilter(new DeleteVersionVisibilityExpressionFilter(visibilityTags,
       VisibilityConstants.SORTED_ORDINAL_SERIALIZATION_FORMAT));
-    try (RegionScanner scanner = ctx.getEnvironment().getRegion().getScanner(new Scan(get))) {
-      // NOTE: Please don't use HRegion.get() instead,
-      // because it will copy cells to heap. See HBASE-26036
-      List<Cell> result = new ArrayList<>();
-      scanner.next(result);
-
-      if (result.size() < get.getMaxVersions()) {
-        // Nothing to delete
-        PrivateCellUtil.updateLatestStamp(cell, byteNow);
-        return;
-      }
-      if (result.size() > get.getMaxVersions()) {
-        throw new RuntimeException("Unexpected size: " + result.size() +
-          ". Results more than the max versions obtained.");
-      }
-      Cell getCell = result.get(get.getMaxVersions() - 1);
-      PrivateCellUtil.setTimestamp(cell, getCell.getTimestamp());
+    List<Cell> result = ctx.getEnvironment().getRegion().get(get, false);
+
+    if (result.size() < get.getMaxVersions()) {
+      // Nothing to delete
+      PrivateCellUtil.updateLatestStamp(cell, byteNow);
+      return;
+    }
+    if (result.size() > get.getMaxVersions()) {
+      throw new RuntimeException("Unexpected size: " + result.size() +
+        ". Results more than the max versions obtained.");
     }
+    Cell getCell = result.get(get.getMaxVersions() - 1);
+    PrivateCellUtil.setTimestamp(cell, getCell.getTimestamp());
+
     // We are bypassing here because in the HRegion.updateDeleteLatestVersionTimeStamp we would
     // update with the current timestamp after again doing a get. As the hook as already determined
     // the needed timestamp we need to bypass here.
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
index de1d02bfe73..1489c1f0400 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -87,8 +88,20 @@ public class TestCheckAndMutateWithByteBuff {
   }
 
   @Test
-  public void testCheckAndMutateWithByteBuff() throws Exception {
-    Table testTable = createTable(TableName.valueOf(name.getMethodName()));
+  public void testCheckAndMutateWithByteBuffNoEncode() throws Exception {
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testCheckAndMutateWithByteBuffEncode() throws Exception {
+    // Tests for HBASE-26777.
+    // As most HBase.getRegion() calls have been factored out from HBase, you'd need to revert
+    // both HBASE-26777, and the HBase.get() replacements from HBASE-26036 for this test to fail
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.FAST_DIFF);
+  }
+
+  private void testCheckAndMutateWithByteBuff(TableName tableName, DataBlockEncoding dbe) throws Exception {
+    Table testTable = createTable(tableName, dbe);
     byte[] checkRow = Bytes.toBytes("checkRow");
     byte[] checkQualifier = Bytes.toBytes("cq");
     byte[] checkValue = Bytes.toBytes("checkValue");
@@ -104,10 +117,13 @@ public class TestCheckAndMutateWithByteBuff {
       Bytes.toBytes("testValue"))));
   }
 
-  private Table createTable(TableName tableName)
+  private Table createTable(TableName tableName, DataBlockEncoding dbe)
     throws IOException {
     TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
-      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF).setBlocksize(100).build())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF)
+        .setBlocksize(100)
+        .setDataBlockEncoding(dbe)
+        .build())
       .build();
     return TEST_UTIL.createTable(td, null);
   }