From 5ec1cebae62c6e7131ee66bd30f26749fffdab64 Mon Sep 17 00:00:00 2001
From: Mike Drob
Date: Mon, 6 Nov 2017 21:16:48 -0600
Subject: [PATCH] HBASE-19195 error-prone fixes for client, mr, and server

---
 .../hadoop/hbase/filter/FilterListWithAND.java | 96 +++++++++++-----------
 .../hbase/mapreduce/TableInputFormatBase.java | 5 +-
 .../apache/hadoop/hbase/PerformanceEvaluation.java | 10 ++-
 .../hadoop/hbase/client/TestFromClientSide3.java | 29 ++++---
 .../apache/hadoop/hbase/mapred/TestSplitTable.java | 8 +-
 .../hadoop/hbase/mapreduce/TestImportTsv.java | 3 +-
 .../TsvImporterCustomTestMapperForOprAttr.java | 9 +-
 .../hbase/regionserver/TestMajorCompaction.java | 1 +
 8 files changed, 90 insertions(+), 71 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index ca2c149ed7..19e4d3a54e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -90,57 +90,59 @@ public class FilterListWithAND extends FilterListBase {
    * code of current sub-filter.
    */
   private ReturnCode mergeReturnCode(ReturnCode rc, ReturnCode localRC) {
-    if (rc == ReturnCode.SEEK_NEXT_USING_HINT || localRC == ReturnCode.SEEK_NEXT_USING_HINT) {
+    if (rc == ReturnCode.SEEK_NEXT_USING_HINT) {
       return ReturnCode.SEEK_NEXT_USING_HINT;
     }
     switch (localRC) {
-    case INCLUDE:
-      return rc;
-    case INCLUDE_AND_NEXT_COL:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL)) {
-        return ReturnCode.INCLUDE_AND_NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
-        return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
-      }
-      if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
+      case SEEK_NEXT_USING_HINT:
+        return ReturnCode.SEEK_NEXT_USING_HINT;
+      case INCLUDE:
+        return rc;
+      case INCLUDE_AND_NEXT_COL:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL)) {
+          return ReturnCode.INCLUDE_AND_NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
+          return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
+        }
+        if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case INCLUDE_AND_SEEK_NEXT_ROW:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
+            ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
+          return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
+        }
+        if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case SKIP:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.SKIP)) {
+          return ReturnCode.SKIP;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case NEXT_COL:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.SKIP,
+            ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case NEXT_ROW:
         return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case INCLUDE_AND_SEEK_NEXT_ROW:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
-          ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
-        return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
-      }
-      if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case SKIP:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.SKIP)) {
-        return ReturnCode.SKIP;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case NEXT_COL:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.SKIP,
-          ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case NEXT_ROW:
-      return ReturnCode.NEXT_ROW;
     }
     throw new IllegalStateException(
         "Received code is not valid. rc: " + rc + ", localRC: " + localRC);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 8d12b43357..f4c2f9b1a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -286,8 +286,9 @@ extends InputFormat {
     }
 
     //The default value of "hbase.mapreduce.input.autobalance" is false.
-    if (context.getConfiguration().getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false) != false) {
-      long maxAveRegionSize = context.getConfiguration().getInt(MAX_AVERAGE_REGION_SIZE, 8*1073741824);
+    if (context.getConfiguration().getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false)) {
+      long maxAveRegionSize = context.getConfiguration()
+          .getLong(MAX_AVERAGE_REGION_SIZE, 8L*1073741824); //8GB
       return calculateAutoBalancedSplits(splits, maxAveRegionSize);
     }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index fb6701fec5..71bc9ad63b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -1003,9 +1003,13 @@ public class PerformanceEvaluation extends Configured implements Tool {
     }
 
     int getValueLength(final Random r) {
-      if (this.opts.isValueRandom()) return Math.abs(r.nextInt() % opts.valueSize);
-      else if (this.opts.isValueZipf()) return Math.abs(this.zipf.nextInt());
-      else return opts.valueSize;
+      if (this.opts.isValueRandom()) {
+        return r.nextInt(opts.valueSize);
+      } else if (this.opts.isValueZipf()) {
+        return Math.abs(this.zipf.nextInt());
+      } else {
+        return opts.valueSize;
+      }
     }
 
     void updateValueSize(final Result [] rs) throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 8199525dfa..385d4ad77b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -79,13 +79,13 @@ public class TestFromClientSide3 {
   private static byte[] FAMILY = Bytes.toBytes("testFamily");
   private static Random random = new Random();
   private static int SLAVES = 3;
-  private static byte [] ROW = Bytes.toBytes("testRow");
+  private static final byte [] ROW = Bytes.toBytes("testRow");
   private static final byte[] ANOTHERROW = Bytes.toBytes("anotherrow");
-  private static byte [] QUALIFIER = Bytes.toBytes("testQualifier");
-  private static byte [] VALUE = Bytes.toBytes("testValue");
-  private final static byte[] COL_QUAL = Bytes.toBytes("f1");
-  private final static byte[] VAL_BYTES = Bytes.toBytes("v1");
-  private final static byte[] ROW_BYTES = Bytes.toBytes("r1");
+  private static final byte [] QUALIFIER = Bytes.toBytes("testQualifier");
+  private static final byte [] VALUE = Bytes.toBytes("testValue");
+  private static final byte[] COL_QUAL = Bytes.toBytes("f1");
+  private static final byte[] VAL_BYTES = Bytes.toBytes("v1");
+  private static final byte[] ROW_BYTES = Bytes.toBytes("r1");
   /**
    * @throws java.lang.Exception
    */
@@ -365,7 +365,7 @@ public class TestFromClientSide3 {
           break;
         }
       } catch (Exception e) {
-        LOG.debug("Waiting for region to come online: " + regionName);
+        LOG.debug("Waiting for region to come online: " + Bytes.toString(regionName));
       }
       Thread.sleep(40);
     }
@@ -481,6 +481,7 @@ public class TestFromClientSide3 {
     assertEquals(exist, true);
   }
 
+  @Test
   public void testHTableExistsMethodSingleRegionMultipleGets() throws Exception {
     HTable table = TEST_UTIL.createTable(
@@ -492,13 +492,11 @@ public class TestFromClientSide3 {
 
     List gets = new ArrayList();
     gets.add(new Get(ROW));
-    gets.add(null);
     gets.add(new Get(ANOTHERROW));
 
     Boolean[] results = table.exists(gets);
-    assertEquals(results[0], true);
-    assertEquals(results[1], false);
-    assertEquals(results[2], false);
+    assertTrue(results[0]);
+    assertFalse(results[1]);
   }
 
   @Test
@@ -691,6 +690,7 @@ public class TestFromClientSide3 {
     cpService.execute(new Runnable() {
       @Override
      public void run() {
+        boolean threw;
        Put put1 = new Put(row);
        Put put2 = new Put(rowLocked);
        put1.addColumn(FAMILY, QUALIFIER, value1);
@@ -712,8 +712,13 @@ public class TestFromClientSide3 {
               return rpcCallback.get();
             }
           });
-          fail("This cp should fail because the target lock is blocked by previous put");
+          threw = false;
         } catch (Throwable ex) {
+          threw = true;
+        }
+        if (!threw) {
+          // Can't call fail() earlier because the catch would eat it.
+ fail("This cp should fail because the target lock is blocked by previous put"); } } }); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java index 4a37f88201..6106987aca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java @@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category; public class TestSplitTable { @Test - @SuppressWarnings("deprecation") + @SuppressWarnings({"deprecation", "SelfComparison"}) public void testSplitTableCompareTo() { TableSplit aTableSplit = new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("aaa"), Bytes.toBytes("ddd"), "locationA"); @@ -44,9 +44,9 @@ public class TestSplitTable { TableSplit cTableSplit = new TableSplit(Bytes.toBytes("tableA"), Bytes.toBytes("lll"), Bytes.toBytes("zzz"), "locationA"); - assertTrue(aTableSplit.compareTo(aTableSplit) == 0); - assertTrue(bTableSplit.compareTo(bTableSplit) == 0); - assertTrue(cTableSplit.compareTo(cTableSplit) == 0); + assertEquals(0, aTableSplit.compareTo(aTableSplit)); + assertEquals(0, bTableSplit.compareTo(bTableSplit)); + assertEquals(0, cTableSplit.compareTo(cTableSplit)); assertTrue(aTableSplit.compareTo(bTableSplit) < 0); assertTrue(bTableSplit.compareTo(aTableSplit) > 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index 83de8f9c4d..743d859c95 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -436,7 +437,7 @@ public class TestImportTsv implements Configurable { // run the import Tool tool = new ImportTsv(); - LOG.debug("Running ImportTsv with arguments: " + argsArray); + LOG.debug("Running ImportTsv with arguments: " + Arrays.toString(argsArray)); assertEquals(0, ToolRunner.run(conf, tool, argsArray)); // Perform basic validation. 
If the input args did not include diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java index 9d8b8f0b0e..4050862c99 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; +import java.util.Arrays; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Put; @@ -44,10 +45,10 @@ public class TsvImporterCustomTestMapperForOprAttr extends TsvImporterMapper { for (String attr : attributes) { String[] split = attr.split(ImportTsv.DEFAULT_ATTRIBUTES_SEPERATOR); if (split == null || split.length <= 1) { - throw new BadTsvLineException("Invalid attributes seperator specified" + attributes); + throw new BadTsvLineException(msg(attributes)); } else { if (split[0].length() <= 0 || split[1].length() <= 0) { - throw new BadTsvLineException("Invalid attributes seperator specified" + attributes); + throw new BadTsvLineException(msg(attributes)); } put.setAttribute(split[0], Bytes.toBytes(split[1])); } @@ -55,4 +56,8 @@ public class TsvImporterCustomTestMapperForOprAttr extends TsvImporterMapper { } put.add(kv); } + + private String msg(Object[] attributes) { + return "Invalid attributes separator specified: " + Arrays.toString(attributes); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index 3edf7c7ce3..e838b4cdb3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -443,6 +443,7 @@ public class TestMajorCompaction { * basically works. * @throws IOException */ + @Test public void testMajorCompactingToNoOutputWithReverseScan() throws IOException { createStoreFile(r); for (int i = 0; i < compactionThreshold; i++) { -- 2.14.1
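
Reviewer note, not part of the patch: the TableInputFormatBase hunk replaces getInt(MAX_AVERAGE_REGION_SIZE, 8*1073741824) with getLong(MAX_AVERAGE_REGION_SIZE, 8L*1073741824). The old default is computed in 32-bit int arithmetic before being assigned to a long, so it silently overflows. A minimal standalone sketch of that overflow class follows; the class name OverflowDemo is illustrative only and does not exist in HBase.

    public class OverflowDemo {
      public static void main(String[] args) {
        // 8 * 2^30 = 2^33, which wraps to 0 when evaluated as a 32-bit int
        long wrong = 8 * 1073741824;   // int multiply first, then widened: 0
        long right = 8L * 1073741824;  // long multiply: 8589934592 (8 GiB)
        System.out.println(wrong + " vs " + right);
      }
    }

The 8L literal forces the multiplication into long arithmetic, which is the same reasoning behind the switch to getLong in the patch.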