From 2e45fbfe01027af3bad28e8729edb7061de5ab3b Mon Sep 17 00:00:00 2001 From: Sahil Aggarwal Date: Tue, 26 Jun 2018 09:39:16 +0530 Subject: [PATCH] HBASE-19164: Remove UUID.randomUUID in tests. --- .../org/apache/hadoop/hbase/HBaseCommonTestingUtility.java | 8 +++++++- .../TestReplicationSyncUpToolWithBulkLoadedData.java | 2 +- .../org/apache/hadoop/hbase/RandomStringGeneratorImpl.java | 3 ++- .../hadoop/hbase/test/IntegrationTestBigLinkedList.java | 6 ++++-- .../test/IntegrationTestBigLinkedListWithVisibility.java | 6 +++--- .../hadoop/hbase/test/IntegrationTestReplication.java | 7 ++++--- .../mapreduce/TestImportTSVWithOperationAttributes.java | 4 ++-- .../hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java | 2 +- .../hbase/mapreduce/TestImportTSVWithVisibilityLabels.java | 14 +++++++------- .../org/apache/hadoop/hbase/mapreduce/TestImportTsv.java | 2 +- .../java/org/apache/hadoop/hbase/HBaseTestingUtility.java | 2 +- .../apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java | 2 +- .../org/apache/hadoop/hbase/TestNodeHealthCheckChore.java | 2 +- .../org/apache/hadoop/hbase/client/TestFromClientSide.java | 8 ++++---- .../apache/hadoop/hbase/client/TestSnapshotWithAcl.java | 8 ++++---- .../replication/TestReplicationAdminWithClusters.java | 2 +- .../apache/hadoop/hbase/io/hfile/TestHFileEncryption.java | 2 +- .../apache/hadoop/hbase/master/TestSplitLogManager.java | 4 ++-- .../hbase/master/snapshot/TestSnapshotFileCache.java | 2 +- .../java/org/apache/hadoop/hbase/mob/TestMobFileName.java | 5 ++++- .../hbase/mob/compactions/TestPartitionedMobCompactor.java | 8 ++++---- .../apache/hadoop/hbase/regionserver/TestClusterId.java | 2 +- .../org/apache/hadoop/hbase/regionserver/TestHRegion.java | 2 +- .../hadoop/hbase/regionserver/TestHRegionReplayEvents.java | 2 +- .../hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java | 2 +- .../hbase/replication/SerialReplicationTestBase.java | 2 +- .../hadoop/hbase/replication/TestReplicationEndpoint.java | 2 +- 
.../TestRaceWhenCreatingReplicationSource.java | 2 +- .../hbase/security/access/TestAccessControlFilter.java | 6 +++--- .../hbase/security/access/TestScanEarlyTermination.java | 10 +++++----- .../org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java | 2 +- .../java/org/apache/hadoop/hbase/util/TestFSUtils.java | 10 +++++----- .../java/org/apache/hadoop/hbase/util/TestFSVisitor.java | 2 +- .../hadoop/hbase/util/TestRegionSplitCalculator.java | 5 +++-- .../org/apache/hadoop/hbase/HBaseZKTestingUtility.java | 2 +- 35 files changed, 82 insertions(+), 68 deletions(-) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index 74f653c0da..6789b8d07d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; @@ -144,9 +145,14 @@ public class HBaseCommonTestingUtility { * @see #getBaseTestDir() */ public Path getRandomDir() { - return new Path(getBaseTestDir(), UUID.randomUUID().toString()); + return new Path(getBaseTestDir(), getRandomUUID().toString()); } + public UUID getRandomUUID() { + return new UUID(ThreadLocalRandom.current().nextLong(), ThreadLocalRandom.current().nextLong()); + } + + protected void createSubDir(String propertyName, Path parent, String subDirName) { Path newPath = new Path(parent, subDirName); File newDir = new File(newPath.toString()).getAbsoluteFile(); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java index e248e9af97..0ea0de65a7 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java @@ -85,7 +85,7 @@ public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplication Iterator randomHFileRangeListIterator = null; Set randomHFileRanges = new HashSet<>(16); for (int i = 0; i < 16; i++) { - randomHFileRanges.add(UUID.randomUUID().toString()); + randomHFileRanges.add(utility1.getRandomUUID().toString()); } List randomHFileRangeList = new ArrayList<>(randomHFileRanges); Collections.sort(randomHFileRangeList); diff --git a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java index 8e3b71d077..80f0ccaadc 100644 --- a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java +++ b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java @@ -20,13 +20,14 @@ package org.apache.hadoop.hbase; import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; public class RandomStringGeneratorImpl implements RandomStringGenerator { private final String s; public RandomStringGeneratorImpl() { - s = UUID.randomUUID().toString(); + s = new UUID(ThreadLocalRandom.current().nextLong(), ThreadLocalRandom.current().nextLong()).toString(); } @Override diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index 35bc7a1a42..281959c55d 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -226,6 +226,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { private static final String CONCURRENT_WALKER_KEY = "IntegrationTestBigLinkedList.generator.concurrentwalkers"; + protected static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + protected int NUM_SLAVES_BASE = 3; // number of slaves for the cluster private static final int MISSING_ROWS_TO_LOG = 10; // YARN complains when too many counters @@ -1466,7 +1468,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { String outputDir, Integer width, Integer wrapMultiplier, Integer numWalkers) throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = UUID.randomUUID(); //create a random UUID. + UUID uuid = TEST_UTIL.getRandomUUID(); //create a random UUID. Path generatorOutput = new Path(outputPath, uuid.toString()); Generator generator = new Generator(); @@ -1486,7 +1488,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { protected void runVerify(String outputDir, int numReducers, long expectedNumNodes) throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = UUID.randomUUID(); //create a random UUID. + UUID uuid = TEST_UTIL.getRandomUUID(); //create a random UUID. 
Path iterationOutput = new Path(outputPath, uuid.toString()); Verify verify = new Verify(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java index a8c595a5fd..d2195a57ab 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java @@ -481,7 +481,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB protected void runGenerator(int numMappers, long numNodes, String outputDir, Integer width, Integer wrapMultiplier, Integer numWalkers) throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = UUID.randomUUID(); // create a random UUID. + UUID uuid = TEST_UTIL.getRandomUUID(); // create a random UUID. Path generatorOutput = new Path(outputPath, uuid.toString()); Generator generator = new VisibilityGenerator(); @@ -511,7 +511,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) { LOG.info("Verifying table " + i); sleep(SLEEP_IN_MS); - UUID uuid = UUID.randomUUID(); // create a random UUID. + UUID uuid = TEST_UTIL.getRandomUUID(); // create a random UUID. Path iterationOutput = new Path(outputPath, uuid.toString()); Verify verify = new VisibilityVerify(getTableName(i).getNameAsString(), i); verify(numReducers, expectedNumNodes, iterationOutput, verify); @@ -545,7 +545,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB LOG.info("Verifying common table with index " + index); sleep(SLEEP_IN_MS); Path outputPath = new Path(outputDir); - UUID uuid = UUID.randomUUID(); // create a random UUID. + UUID uuid = TEST_UTIL.getRandomUUID(); // create a random UUID. 
Path iterationOutput = new Path(outputPath, uuid.toString()); Verify verify = new VisibilityVerify(TableName.valueOf(COMMON_TABLE_NAME).getNameAsString(), index); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java index 39a1d5a2e3..267c312d98 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java @@ -54,7 +54,7 @@ import java.util.UUID; * linked list in one cluster and verifies that the data is correct in a sink cluster. The test * handles creating the tables and schema and setting up the replication. */ -public class IntegrationTestReplication extends IntegrationTestBigLinkedList { +public class IntegrationTestReplication extends IntegrationTestBigLinkedList { protected String sourceClusterIdString; protected String sinkClusterIdString; protected int numIterations; @@ -264,7 +264,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList { */ protected void runGenerator() throws Exception { Path outputPath = new Path(outputDir); - UUID uuid = UUID.randomUUID(); //create a random UUID. + UUID uuid = util.getRandomUUID(); //create a random UUID. 
Path iterationOutput = new Path(outputPath, uuid.toString()); Verify verify = new Verify(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java index 9951e64ddf..fbed112b3c 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java @@ -123,7 +123,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable { @Test public void testMROnTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); // Prepare the arguments required for the test. String[] args = new String[] { @@ -139,7 +139,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable { @Test public void testMROnTableWithInvalidOperationAttr() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); // Prepare the arguments required for the test. 
String[] args = new String[] { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java index aaa495efcc..2d447d19f5 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java @@ -111,7 +111,7 @@ public class TestImportTSVWithTTLs implements Configurable { @Test public void testMROnTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); // Prepare the arguments required for the test. String[] args = new String[] { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java index 2e5f3be21b..537ae29c12 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java @@ -161,7 +161,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testMROnTable() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); // Prepare the arguments required for the test. 
String[] args = new String[] { @@ -177,7 +177,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testMROnTableWithDeletes() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); // Prepare the arguments required for the test. String[] args = new String[] { @@ -229,7 +229,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testMROnTableWithBulkload() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles"); // Prepare the arguments required for the test. String[] args = new String[] { @@ -245,7 +245,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testBulkOutputWithTsvImporterTextMapper() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); String FAMILY = "FAM"; Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()),"hfiles"); // Prepare the arguments required for the test. @@ -266,7 +266,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testMRWithOutputFormat() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles"); // Prepare the arguments required for the test. 
String[] args = new String[] { @@ -283,7 +283,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testBulkOutputWithInvalidLabels() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles"); // Prepare the arguments required for the test. String[] args = @@ -301,7 +301,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable { @Test public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID()); + final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID()); Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles"); // Prepare the arguments required for the test. String[] args = diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index 68c6b6b4bc..1c1a5e74a8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -118,7 +118,7 @@ public class TestImportTsv implements Configurable { @Before public void setup() throws Exception { - tn = TableName.valueOf("test-" + UUID.randomUUID()); + tn = TableName.valueOf("test-" + util.getRandomUUID()); args = new HashMap<>(); // Prepare the arguments required for the test. 
args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 7cc933325e..559b99ee2d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -551,7 +551,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { //the working directory, and create a unique sub dir there FileSystem fs = getTestFileSystem(); Path newDataTestDir; - String randomStr = UUID.randomUUID().toString(); + String randomStr = getRandomUUID().toString(); if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) { newDataTestDir = new Path(getDataTestDir(), randomStr); File dataTestDir = new File(newDataTestDir.toString()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java index 9234ea6801..331c5baf4d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java @@ -68,7 +68,7 @@ public class TestHBaseOnOtherDfsCluster { targetFs = FileSystem.get(util2.getConfiguration()); assertFsSameUri(fs, targetFs); - Path randomFile = new Path("/"+UUID.randomUUID()); + Path randomFile = new Path("/"+util1.getRandomUUID()); assertTrue(targetFs.createNewFile(randomFile)); assertTrue(fs.exists(randomFile)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java index c3a6f0c4b7..53855737a0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java @@ -144,7 +144,7 @@ public class TestNodeHealthCheckChore { throw new IOException("Failed mkdirs " + tempDir); } } - String scriptName = "HealthScript" + UUID.randomUUID().toString() + String scriptName = "HealthScript" + UTIL.getRandomUUID().toString() + (Shell.WINDOWS ? ".cmd" : ".sh"); healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName); conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 100df38a44..daeb97bee5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -357,9 +357,9 @@ public class TestFromClientSide { Table ht = TEST_UTIL.createTable(tableName, FAMILIES); String value = "this is the value"; String value2 = "this is some other value"; - String keyPrefix1 = UUID.randomUUID().toString(); - String keyPrefix2 = UUID.randomUUID().toString(); - String keyPrefix3 = UUID.randomUUID().toString(); + String keyPrefix1 = TEST_UTIL.getRandomUUID().toString(); + String keyPrefix2 = TEST_UTIL.getRandomUUID().toString(); + String keyPrefix3 = TEST_UTIL.getRandomUUID().toString(); putRows(ht, 3, value, keyPrefix1); putRows(ht, 3, value, keyPrefix2); putRows(ht, 3, value, keyPrefix3); @@ -449,7 +449,7 @@ public class TestFromClientSide { private void putRows(Table ht, int numRows, String value, String key) throws IOException { for (int i = 0; i < numRows; i++) { - String row = key + "_" + UUID.randomUUID().toString(); + String row = key + "_" + TEST_UTIL.getRandomUUID().toString(); System.out.println(String.format("Saving row: %s, with value %s", row, value)); Put put = new Put(Bytes.toBytes(row)); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java index 2cd2004eb8..d7d93c876a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java @@ -52,7 +52,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSnapshotWithAcl.class); - public TableName TEST_TABLE = TableName.valueOf(UUID.randomUUID().toString()); + public TableName TEST_TABLE = TableName.valueOf(TEST_UTIL.getRandomUUID().toString()); private static final int ROW_COUNT = 30000; @@ -197,11 +197,11 @@ public class TestSnapshotWithAcl extends SecureTestUtil { loadData(); verifyRows(TEST_TABLE); - String snapshotName1 = UUID.randomUUID().toString(); + String snapshotName1 = TEST_UTIL.getRandomUUID().toString(); admin.snapshot(snapshotName1, TEST_TABLE); // clone snapshot with restoreAcl true. - TableName tableName1 = TableName.valueOf(UUID.randomUUID().toString()); + TableName tableName1 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString()); admin.cloneSnapshot(snapshotName1, tableName1, true); verifyRows(tableName1); verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW); @@ -210,7 +210,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil { verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE); // clone snapshot with restoreAcl false. 
- TableName tableName2 = TableName.valueOf(UUID.randomUUID().toString()); + TableName tableName2 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString()); admin.cloneSnapshot(snapshotName1, tableName2, false); verifyRows(tableName2); verifyAllowed(new AccessReadAction(tableName2), USER_OWNER); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java index 268fe0015b..e5743a886e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java @@ -299,7 +299,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase { @Override public UUID getPeerUUID() { - return UUID.randomUUID(); + return utility1.getRandomUUID(); } @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index 23fe905414..ba238c73b1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -221,7 +221,7 @@ public class TestHFileEncryption { .build(); // write a new test HFile LOG.info("Writing with " + fileContext); - Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile"); + Path path = new Path(TEST_UTIL.getDataTestDir(), TEST_UTIL.getRandomUUID().toString() + ".hfile"); FSDataOutputStream out = fs.create(path); HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf) .withOutputStream(out) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java index c611bed1d3..f6d2381711 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java @@ -124,7 +124,7 @@ public class TestSplitLogManager { conf = TEST_UTIL.getConfiguration(); // Use a different ZK wrapper instance for each tests. zkw = - new ZKWatcher(conf, "split-log-manager-tests" + UUID.randomUUID().toString(), null); + new ZKWatcher(conf, "split-log-manager-tests" + TEST_UTIL.getRandomUUID().toString(), null); master = new DummyMasterServices(zkw, conf); ZKUtil.deleteChildrenRecursively(zkw, zkw.getZNodePaths().baseZNode); @@ -523,7 +523,7 @@ public class TestSplitLogManager { Path logDirPath = new Path(new Path(dir, HConstants.HREGION_LOGDIR_NAME), serverName); fs.mkdirs(logDirPath); // create an empty log file - String logFile = new Path(logDirPath, UUID.randomUUID().toString()).toString(); + String logFile = new Path(logDirPath, TEST_UTIL.getRandomUUID().toString()).toString(); fs.create(new Path(logDirPath, logFile)).close(); // spin up a thread mocking split done. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index cc5e755709..5589f0d5c5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -179,7 +179,7 @@ public class TestSnapshotFileCache { // add a random file to make sure we refresh - FileStatus randomFile = mockStoreFile(UUID.randomUUID().toString()); + FileStatus randomFile = mockStoreFile(UTIL.getRandomUUID().toString()); allStoreFiles.add(randomFile); deletableFiles = cache.getUnreferencedFiles(allStoreFiles, null); assertEquals(randomFile, Iterables.getOnlyElement(deletableFiles)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java index 22a47ba9f8..ea0dac08cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java @@ -26,6 +26,7 @@ import java.util.Date; import java.util.Random; import java.util.UUID; import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.MD5Hash; @@ -41,6 +42,8 @@ public class TestMobFileName { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestMobFileName.class); + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + private String uuid; private Date date; private String dateStr; @@ -49,7 +52,7 @@ public class TestMobFileName { @Before public void setUp() { Random random = new Random(); - uuid = 
UUID.randomUUID().toString().replaceAll("-", ""); + uuid = TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""); date = new Date(); dateStr = MobUtils.formatDate(date); startKey = Bytes.toBytes(random.nextInt()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java index 94c35f485a..7d0e1af90e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java @@ -136,8 +136,8 @@ public class TestPartitionedMobCompactor { Path testDir = FSUtils.getRootDir(conf); Path mobTestDir = new Path(testDir, MobConstants.MOB_DIR_NAME); basePath = new Path(new Path(mobTestDir, tableName), family); - mobSuffix = UUID.randomUUID().toString().replaceAll("-", ""); - delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del"; + mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""); + delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del"; allFiles.clear(); mobFiles.clear(); delFiles.clear(); @@ -832,8 +832,8 @@ public class TestPartitionedMobCompactor { if (sameStartKey) { // When creating multiple files under one partition, suffix needs to be different. 
startRow = Bytes.toBytes(startKey); - mobSuffix = UUID.randomUUID().toString().replaceAll("-", ""); - delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del"; + mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""); + delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del"; } else { startRow = Bytes.toBytes(startKey + i); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java index 21e7ac19ce..860f966440 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java @@ -103,7 +103,7 @@ public class TestClusterId { FSDataOutputStream s = null; try { s = fs.create(filePath); - s.writeUTF(UUID.randomUUID().toString()); + s.writeUTF(TEST_UTIL.getRandomUUID().toString()); } finally { if (s != null) { s.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 1ff6b273d9..f2824dd591 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -4694,7 +4694,7 @@ public class TestHRegion { // XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not // deal with classes which have a field of an inner class. See discussions in HBASE-15536. 
walConf.set(WALFactory.WAL_PROVIDER, "filesystem"); - final WALFactory wals = new WALFactory(walConf, UUID.randomUUID().toString()); + final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString()); final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build())); this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, tableDurability, wal, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index 3b4ce50039..828e686461 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -1643,7 +1643,7 @@ public class TestHRegionReplayEvents { byte[] valueBytes) throws IOException { HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration()); // TODO We need a way to do this without creating files - Path testFile = new Path(testPath, UUID.randomUUID().toString()); + Path testFile = new Path(testPath, TEST_UTIL.getRandomUUID().toString()); FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile); try { hFileFactory.withOutputStream(out); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java index 5336963dba..0929e31858 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java @@ -408,7 +408,7 @@ public abstract class AbstractTestFSWAL { } // Add any old cluster id. 
List clusterIds = new ArrayList<>(1); - clusterIds.add(UUID.randomUUID()); + clusterIds.add(TEST_UTIL.getRandomUUID()); // Now make appends run slow. goslow.set(true); for (int i = 0; i < countPerFamily; i++) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java index 259914e91c..1b98518728 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java @@ -72,7 +72,7 @@ public class SerialReplicationTestBase { public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint { - private static final UUID PEER_UUID = UUID.randomUUID(); + private static final UUID PEER_UUID = UTIL.getRandomUUID(); @Override public UUID getPeerUUID() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java index aeed99b528..5d833cc5a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java @@ -401,7 +401,7 @@ public class TestReplicationEndpoint extends TestReplicationBase { } public static class ReplicationEndpointForTest extends BaseReplicationEndpoint { - static UUID uuid = UUID.randomUUID(); + static UUID uuid = utility1.getRandomUUID(); static AtomicInteger contructedCount = new AtomicInteger(); static AtomicInteger startedCount = new AtomicInteger(); static AtomicInteger stoppedCount = new AtomicInteger(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java index 3ef9215792..bd800a841f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java @@ -82,7 +82,7 @@ public class TestRaceWhenCreatingReplicationSource { public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint { - private static final UUID PEER_UUID = UUID.randomUUID(); + private static final UUID PEER_UUID = UTIL.getRandomUUID(); @Override public UUID getPeerUUID() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java index e6f07f1dc5..b2490ec52f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java @@ -132,7 +132,7 @@ public class TestAccessControlFilter extends SecureTestUtil { public Object run() throws Exception { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TABLE); try { @@ -161,7 +161,7 @@ public class TestAccessControlFilter extends SecureTestUtil { public Object run() throws Exception { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = 
ConnectionFactory.createConnection(conf); Table t = connection.getTable(TABLE); try { @@ -189,7 +189,7 @@ public class TestAccessControlFilter extends SecureTestUtil { public Object run() throws Exception { Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TABLE); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java index 96d6b994ff..f0507b24ea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java @@ -157,7 +157,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { @Override public Object run() throws Exception { // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName()); try { @@ -184,7 +184,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { @Override public Object run() throws Exception { // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName()); try { @@ -210,7 +210,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { @Override public Object run() throws Exception { // force a new RS connection - conf.set("testkey", 
UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName()); try { @@ -234,7 +234,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { @Override public Object run() throws Exception { // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName()); try { @@ -262,7 +262,7 @@ public class TestScanEarlyTermination extends SecureTestUtil { @Override public Object run() throws Exception { // force a new RS connection - conf.set("testkey", UUID.randomUUID().toString()); + conf.set("testkey", TEST_UTIL.getRandomUUID().toString()); Connection connection = ConnectionFactory.createConnection(conf); Table t = connection.getTable(TEST_TABLE.getTableName()); try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java index 87d718e139..f5d5d3a5df 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java @@ -415,7 +415,7 @@ public class BaseTestHBaseFsck { MobFileName mobFileName = MobFileName.create(oldFileName); String startKey = mobFileName.getStartKey(); String date = mobFileName.getDate(); - return MobFileName.create(startKey, date, UUID.randomUUID().toString().replaceAll("-", "")) + return MobFileName.create(startKey, date, TEST_UTIL.getRandomUUID().toString().replaceAll("-", "")) .getFileName(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java index 
a862c8c450..aabb5a6620 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java @@ -249,7 +249,7 @@ public class TestFSUtils { assertEquals(new FsPermission("700"), filePerm); // then that the correct file is created - Path p = new Path("target" + File.separator + UUID.randomUUID().toString()); + Path p = new Path("target" + File.separator + htu.getRandomUUID().toString()); try { FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null); out.close(); @@ -268,7 +268,7 @@ public class TestFSUtils { conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true); FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY); // then that the correct file is created - String file = UUID.randomUUID().toString(); + String file = htu.getRandomUUID().toString(); Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file); Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file); try { @@ -309,7 +309,7 @@ public class TestFSUtils { FileSystem fs = FileSystem.get(conf); Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile"); - String file = UUID.randomUUID().toString(); + String file = htu.getRandomUUID().toString(); Path p = new Path(testDir, file); FSDataOutputStream out = fs.create(p); @@ -323,7 +323,7 @@ public class TestFSUtils { mockEnv.setValue(expect); EnvironmentEdgeManager.injectEdge(mockEnv); try { - String dstFile = UUID.randomUUID().toString(); + String dstFile = htu.getRandomUUID().toString(); Path dst = new Path(testDir , dstFile); assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst)); @@ -369,7 +369,7 @@ public class TestFSUtils { FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY, HConstants.DEFAULT_WAL_STORAGE_POLICY); - String file = UUID.randomUUID().toString(); + String file = htu.getRandomUUID().toString(); Path p = new Path(testDir, file); 
WriteDataToHDFS(fs, p, 4096); // will assert existance before deleting. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java index aa2a745375..506dc49971 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java @@ -118,7 +118,7 @@ public class TestFSVisitor { Path familyDir = new Path(regionDir, familyName); fs.mkdirs(familyDir); for (int h = 0; h < 5; ++h) { - String hfileName = UUID.randomUUID().toString().replaceAll("-", ""); + String hfileName = TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""); tableHFiles.add(hfileName); fs.createNewFile(new Path(familyDir, hfileName)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java index ece83bc4d4..553abf4d6c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.SortedSet; import java.util.UUID; import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.ClassRule; @@ -46,7 +47,7 @@ public class TestRegionSplitCalculator { HBaseClassTestRule.forClass(TestRegionSplitCalculator.class); private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class); - + private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); /** * This is range uses a user specified start and end keys. 
It also has an * extra tiebreaker so that different ranges with the same start/end key pair @@ -59,7 +60,7 @@ public class TestRegionSplitCalculator { SimpleRange(byte[] start, byte[] end) { this.start = start; this.end = end; - this.tiebreaker = UUID.randomUUID(); + this.tiebreaker = TEST_UTIL.getRandomUUID(); } @Override diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java index b755c32c44..6ddfc094a1 100644 --- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java +++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java @@ -76,7 +76,7 @@ public class HBaseZKTestingUtility extends HBaseCommonTestingUtility { // Using randomUUID ensures that multiple clusters can be launched by // a same test, if it stops & starts them - Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString()); + Path testDir = getDataTestDir("cluster_" + getRandomUUID().toString()); clusterTestDir = new File(testDir.toString()).getAbsoluteFile(); // Have it cleaned up on exit boolean b = deleteOnExit(); -- 2.16.2