>();
ClassTestFinder classFinder = new ClassTestFinder();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java
index 4097efb..24432c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java
@@ -99,7 +99,7 @@ public class TestClusterBootOrder {
* Tests launching the cluster by first starting regionserver, and then the master
* to ensure that it does not matter which is started first.
*/
- @Test
+ @Test (timeout=180000)
public void testBootRegionServerFirst() throws Exception {
startRegionServer();
startMaster();
@@ -110,7 +110,7 @@ public class TestClusterBootOrder {
* Tests launching the cluster by first starting master, and then the regionserver
* to ensure that it does not matter which is started first.
*/
- @Test
+ @Test (timeout=180000)
public void testBootMasterFirst() throws Exception {
startMaster();
startRegionServer();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
index 07b9cbd..2fa9701 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
@@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category;
public class TestFSTableDescriptorForceCreation {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testShouldCreateNewTableDescriptorIfForcefulCreationIsFalse()
throws IOException {
final String name = "newTable2";
@@ -47,7 +47,7 @@ public class TestFSTableDescriptorForceCreation {
assertTrue("Should create new table descriptor", fstd.createTableDescriptor(htd, false));
}
- @Test
+ @Test (timeout=60000)
public void testShouldNotCreateTheSameTableDescriptorIfForcefulCreationIsFalse()
throws IOException {
final String name = "testAlreadyExists";
@@ -60,7 +60,7 @@ public class TestFSTableDescriptorForceCreation {
assertFalse("Should not create new table descriptor", fstd.createTableDescriptor(htd, false));
}
- @Test
+ @Test (timeout=60000)
public void testShouldAllowForcefulCreationOfAlreadyExistingTableDescriptor()
throws Exception {
final String name = "createNewTableNew2";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
index d8178f0..9d4578a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
@@ -61,7 +61,7 @@ public class TestGlobalMemStoreSize {
* region's mem store size
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testGlobalMemStore() throws Exception {
// Start the cluster
LOG.info("Starting cluster");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index f3e3dc2..fdb9778 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -128,7 +128,7 @@ public class TestHBaseTestingUtility {
}
}
- @Test
+ @Test (timeout=300000)
public void testMiniClusterBindToWildcard() throws Exception {
HBaseTestingUtility hbt = new HBaseTestingUtility();
hbt.getConfiguration().set("hbase.regionserver.ipc.address", "0.0.0.0");
@@ -140,7 +140,7 @@ public class TestHBaseTestingUtility {
}
}
- @Test
+ @Test (timeout=300000)
public void testMiniClusterWithSSLOn() throws Exception {
final String BASEDIR = System.getProperty("test.build.dir",
"target/test-dir") + "/" + TestHBaseTestingUtility.class.getSimpleName();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
index 4fa945a..1ceae60 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
@@ -72,7 +72,7 @@ public class TestHColumnDescriptorDefaultVersions {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithDefault() throws IOException {
Admin admin = TEST_UTIL.getHBaseAdmin();
// Create a table with one family
@@ -89,7 +89,7 @@ public class TestHColumnDescriptorDefaultVersions {
}
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithDefaultFromConf() throws Exception {
TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.getConfiguration().setInt("hbase.column.max.version", 3);
@@ -111,7 +111,7 @@ public class TestHColumnDescriptorDefaultVersions {
}
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithSetVersion() throws Exception {
TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.getConfiguration().setInt("hbase.column.max.version", 3);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
index 2329fc2..84d13f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
@@ -30,7 +30,7 @@ import static junit.framework.Assert.assertEquals;
@Category({MiscTests.class, SmallTests.class})
public class TestHDFSBlocksDistribution {
- @Test
+ @Test (timeout=60000)
public void testAddHostsAndBlockWeight() throws Exception {
HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
distribution.addHostsAndBlockWeight(null, 100);
@@ -57,7 +57,7 @@ public class TestHDFSBlocksDistribution {
}
- @Test
+ @Test (timeout=60000)
public void testAdd() throws Exception {
HDFSBlocksDistribution distribution = new HDFSBlocksDistribution();
distribution.add(new MockHDFSBlocksDistribution());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
index 2ad5f9a..da54712 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
@@ -35,7 +35,7 @@ public class TestHRegionLocation {
* port -- even if they are carrying different regions. Verify that is indeed
* the case.
*/
- @Test
+ @Test (timeout=60000)
public void testHashAndEqualsCode() {
ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
HRegionLocation hrl1 = new HRegionLocation(HRegionInfo.FIRST_META_REGIONINFO, hsa1);
@@ -56,14 +56,14 @@ public class TestHRegionLocation {
assertTrue(hrl4.equals(hrl5));
}
- @Test
+ @Test (timeout=60000)
public void testToString() {
ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
HRegionLocation hrl1 = new HRegionLocation(HRegionInfo.FIRST_META_REGIONINFO, hsa1);
System.out.println(hrl1.toString());
}
- @Test
+ @Test (timeout=60000)
public void testCompareTo() {
ServerName hsa1 = ServerName.valueOf("localhost", 1234, -1L);
HRegionLocation hsl1 =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
index f44eb7b..e22b268 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
@@ -222,7 +222,7 @@ public class TestIOFencing {
* a new regionserver altogether. This fakes the double assignment case where region in one
* location changes the files out from underneath a region being served elsewhere.
*/
- @Test
+ @Test (timeout=180000)
public void testFencingAroundCompaction() throws Exception {
doTest(BlockCompactionsInPrepRegion.class, false);
doTest(BlockCompactionsInPrepRegion.class, true);
@@ -234,7 +234,7 @@ public class TestIOFencing {
* a new regionserver altogether. This fakes the double assignment case where region in one
* location changes the files out from underneath a region being served elsewhere.
*/
- @Test
+ @Test (timeout=180000)
public void testFencingAroundCompactionAfterWALSync() throws Exception {
doTest(BlockCompactionsInCompletionRegion.class, false);
doTest(BlockCompactionsInCompletionRegion.class, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index 6b5ad98..4adc495 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -105,7 +105,7 @@ public class TestIPv6NIOServerSocketChannel {
* Checks whether we are effected by the JDK issue on windows, and if so
* ensures that we are running with preferIPv4Stack=true.
*/
- @Test
+ @Test (timeout=60000)
public void testServerSocket() throws IOException {
byte[] addr = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 };
InetAddress inetAddr = InetAddress.getByAddress(addr);
@@ -143,7 +143,7 @@ public class TestIPv6NIOServerSocketChannel {
* Tests whether every InetAddress we obtain by resolving can open a
* ServerSocketChannel.
*/
- @Test
+ @Test (timeout=60000)
public void testServerSocketFromLocalhostResolution() throws IOException {
InetAddress[] addrs = InetAddress.getAllByName("localhost");
for (InetAddress addr : addrs) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
index a72b151..31d0fba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
@@ -64,7 +64,7 @@ public class TestInfoServers {
/**
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testInfoServersRedirect() throws Exception {
// give the cluster time to start up
UTIL.getConnection().getTable(TableName.META_TABLE_NAME).close();
@@ -84,7 +84,7 @@ public class TestInfoServers {
* TestMasterStatusServlet, but those are true unit tests
* whereas this uses a cluster.
*/
- @Test
+ @Test (timeout=180000)
public void testInfoServersStatusPages() throws Exception {
// give the cluster time to start up
UTIL.getConnection().getTable(TableName.META_TABLE_NAME).close();
@@ -97,7 +97,7 @@ public class TestInfoServers {
"/rs-status"), "meta");
}
- @Test
+ @Test (timeout=180000)
public void testMasterServerReadOnly() throws Exception {
TableName tableName = TableName.valueOf("testMasterServerReadOnly");
byte[] cf = Bytes.toBytes("d");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
index ed141a6..68d115a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java
@@ -62,7 +62,7 @@ public class TestJMXListener {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testStart() throws Exception {
JMXConnector connector = JMXConnectorFactory.connect(
JMXListener.buildJMXServiceURL(connectorPort,connectorPort));
@@ -78,7 +78,7 @@ public class TestJMXListener {
//shutdown hbase only. then try connect, IOException expected
@Rule
public ExpectedException expectedEx = ExpectedException.none();
- @Test
+ @Test (timeout=180000)
public void testStop() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
LOG.info("shutdown hbase cluster...");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java
index bbf4f32..ea644f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java
@@ -41,7 +41,7 @@ public class TestLocalHBaseCluster {
* HBaseTestingUtility facilities for creating a LocalHBaseCluster with
* custom master and regionserver classes.
*/
- @Test
+ @Test (timeout=180000)
public void testLocalHBaseCluster() throws Exception {
TEST_UTIL.startMiniCluster(1, 1, null, MyHMaster.class, MyHRegionServer.class);
// Can we cast back to our master class?
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
index eefb974..e3463f7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
@@ -291,7 +291,7 @@ public class TestMetaTableAccessor {
pair.getFirst().getEncodedName());
}
- @Test
+ @Test (timeout=180000)
public void testParseReplicaIdFromServerColumn() {
String column1 = HConstants.SERVER_QUALIFIER_STR;
assertEquals(0, MetaTableAccessor.parseReplicaIdFromServerColumn(Bytes.toBytes(column1)));
@@ -307,7 +307,7 @@ public class TestMetaTableAccessor {
assertEquals(-1, MetaTableAccessor.parseReplicaIdFromServerColumn(Bytes.toBytes(column6)));
}
- @Test
+ @Test (timeout=180000)
public void testMetaReaderGetColumnMethods() {
Assert.assertArrayEquals(HConstants.SERVER_QUALIFIER, MetaTableAccessor.getServerColumn(0));
Assert.assertArrayEquals(Bytes.toBytes(HConstants.SERVER_QUALIFIER_STR
@@ -327,7 +327,7 @@ public class TestMetaTableAccessor {
MetaTableAccessor.getSeqNumColumn(42));
}
- @Test
+ @Test (timeout=180000)
public void testMetaLocationsForRegionReplicas() throws IOException {
ServerName serverName0 = ServerName.valueOf("foo", 60010, random.nextLong());
ServerName serverName1 = ServerName.valueOf("bar", 60010, random.nextLong());
@@ -404,7 +404,7 @@ public class TestMetaTableAccessor {
assertEquals(0, startCodeCell.getValueLength());
}
- @Test
+ @Test (timeout=180000)
public void testMetaLocationForRegionReplicasIsAddedAtTableCreation() throws IOException {
long regionId = System.currentTimeMillis();
HRegionInfo primary = new HRegionInfo(TableName.valueOf("table_foo"),
@@ -422,7 +422,7 @@ public class TestMetaTableAccessor {
}
}
- @Test
+ @Test (timeout=180000)
public void testMetaLocationForRegionReplicasIsAddedAtRegionSplit() throws IOException {
long regionId = System.currentTimeMillis();
ServerName serverName0 = ServerName.valueOf("foo", 60010, random.nextLong());
@@ -450,7 +450,7 @@ public class TestMetaTableAccessor {
}
}
- @Test
+ @Test (timeout=180000)
public void testMetaLocationForRegionReplicasIsAddedAtRegionMerge() throws IOException {
long regionId = System.currentTimeMillis();
ServerName serverName0 = ServerName.valueOf("foo", 60010, random.nextLong());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
index f70a0d7..9980e84 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
@@ -83,7 +83,7 @@ public class TestMetaTableAccessorNoCluster {
UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testGetHRegionInfo() throws IOException {
assertNull(HRegionInfo.getHRegionInfo(new Result()));
@@ -119,7 +119,7 @@ public class TestMetaTableAccessorNoCluster {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=180000)
public void testRideOverServerNotRunning()
throws IOException, InterruptedException, ServiceException {
// Need a zk watcher.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
index 9943749..29caabc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
@@ -205,7 +205,7 @@ public class TestMetaTableLocator {
* @throws KeeperException
* @throws ServiceException
*/
- @Test
+ @Test (timeout=180000)
public void testGetMetaServerConnectionFails()
throws IOException, InterruptedException, KeeperException, ServiceException {
testVerifyMetaRegionLocationWithException(new ConnectException("Connection refused"));
@@ -225,7 +225,7 @@ public class TestMetaTableLocator {
* @throws KeeperException
* @throws ServiceException
*/
- @Test
+ @Test (timeout=180000)
public void testVerifyMetaRegionServerNotRunning()
throws IOException, InterruptedException, KeeperException, ServiceException {
testVerifyMetaRegionLocationWithException(new ServerNotRunningYetException("mock"));
@@ -238,7 +238,7 @@ public class TestMetaTableLocator {
* @throws KeeperException
* @throws ServiceException
*/
- @Test
+ @Test (timeout=180000)
public void testVerifyMetaRegionLocationFails()
throws IOException, InterruptedException, KeeperException, ServiceException {
ClusterConnection connection = Mockito.mock(ClusterConnection.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 278973e..f419566 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -87,7 +87,7 @@ public class TestMultiVersions {
* up cluster running more than a single test per spin up. Keep old tests'
* crazyness.
*/
- @Test
+ @Test (timeout=180000)
public void testTimestamps() throws Exception {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testTimestamps"));
HColumnDescriptor hcd = new HColumnDescriptor(TimestampTestBase.FAMILY_NAME);
@@ -121,7 +121,7 @@ public class TestMultiVersions {
* up cluster running more than a single test per spin up. Keep old tests'
* crazyness.
*/
- @Test
+ @Test (timeout=180000)
public void testGetRowVersions() throws Exception {
final String tableName = "testGetRowVersions";
final byte [] contents = Bytes.toBytes("contents");
@@ -187,7 +187,7 @@ public class TestMultiVersions {
* Tests five cases of scans and timestamps.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testScanMultipleVersions() throws Exception {
final TableName tableName = TableName.valueOf("testScanMultipleVersions");
final HTableDescriptor desc = new HTableDescriptor(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
index f47a8e0..fb2bd96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
@@ -99,7 +99,7 @@ public class TestNamespace {
}
}
- @Test
+ @Test (timeout=180000)
public void verifyReservedNS() throws IOException {
//verify existence of reserved namespaces
NamespaceDescriptor ns =
@@ -150,7 +150,7 @@ public class TestNamespace {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteReservedNS() throws Exception {
boolean exceptionCaught = false;
try {
@@ -172,7 +172,7 @@ public class TestNamespace {
}
}
- @Test
+ @Test (timeout=180000)
public void createRemoveTest() throws Exception {
String testName = "createRemoveTest";
String nsName = prefix+"_"+testName;
@@ -195,7 +195,7 @@ public class TestNamespace {
assertNull(zkNamespaceManager.get(nsName));
}
- @Test
+ @Test (timeout=180000)
public void createDoubleTest() throws IOException, InterruptedException {
String testName = "createDoubleTest";
String nsName = prefix+"_"+testName;
@@ -218,7 +218,7 @@ public class TestNamespace {
assertEquals(1, admin.listTables().length);
}
- @Test
+ @Test (timeout=180000)
public void createTableTest() throws IOException, InterruptedException {
String testName = "createTableTest";
String nsName = prefix+"_"+testName;
@@ -266,7 +266,7 @@ public class TestNamespace {
admin.deleteNamespace(nsName);
}
- @Test
+ @Test (timeout=180000)
public void createTableInDefaultNamespace() throws Exception {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("default_table"));
HColumnDescriptor colDesc = new HColumnDescriptor("cf1");
@@ -277,7 +277,7 @@ public class TestNamespace {
admin.deleteTable(desc.getTableName());
}
- @Test
+ @Test (timeout=180000)
public void createTableInSystemNamespace() throws Exception {
TableName tableName = TableName.valueOf("hbase:createTableInSystemNamespace");
HTableDescriptor desc = new HTableDescriptor(tableName);
@@ -290,7 +290,7 @@ public class TestNamespace {
admin.deleteTable(desc.getTableName());
}
- @Ignore @Test
+ @Ignore @Test (timeout=180000)
public void testNamespaceJanitor() throws Exception {
FileSystem fs = TEST_UTIL.getTestFileSystem();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index e35fc08..b532b0b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -48,7 +48,7 @@ import com.yammer.metrics.stats.UniformSample;
public class TestPerformanceEvaluation {
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testSerialization()
throws JsonGenerationException, JsonMappingException, IOException {
PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
@@ -65,7 +65,7 @@ public class TestPerformanceEvaluation {
* Exercise the mr spec writing. Simple assertions to make sure it is basically working.
* @throws IOException
*/
- @Ignore @Test
+ @Ignore @Test (timeout=60000)
public void testWriteInputFile() throws IOException {
TestOptions opts = new PerformanceEvaluation.TestOptions();
final int clients = 10;
@@ -93,7 +93,7 @@ public class TestPerformanceEvaluation {
}
}
- @Test
+ @Test (timeout=60000)
public void testSizeCalculation() {
TestOptions opts = new PerformanceEvaluation.TestOptions();
opts = PerformanceEvaluation.calculateRowsAndSize(opts);
@@ -115,7 +115,7 @@ public class TestPerformanceEvaluation {
assertEquals(defaultPerClientRunRows * 2, opts.getPerClientRunRows());
}
- @Test
+ @Test (timeout=60000)
public void testZipfian()
throws NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException,
IllegalArgumentException, InvocationTargetException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
index c29a460..09ca424 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
@@ -121,7 +121,7 @@ public class TestSerialization {
}
- @Test
+ @Test (timeout=60000)
public void testSplitLogTask() throws DeserializationException {
SplitLogTask slt = new SplitLogTask.Unassigned(ServerName.valueOf("mgr,1,1"),
RecoveryMode.LOG_REPLAY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
index 5c56e9a..8730ced 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java
@@ -34,7 +34,7 @@ import com.google.protobuf.ByteString;
@Category({MiscTests.class, SmallTests.class})
public class TestServerLoad {
- @Test
+ @Test (timeout=60000)
public void testRegionLoadAggregation() {
ServerLoad sl = new ServerLoad(createServerLoadProto());
assertEquals(13, sl.getStores());
@@ -47,7 +47,7 @@ public class TestServerLoad {
}
- @Test
+ @Test (timeout=60000)
public void testToString() {
ServerLoad sl = new ServerLoad(createServerLoadProto());
String slToString = sl.toString();
@@ -59,7 +59,7 @@ public class TestServerLoad {
assertTrue(slToString.contains("coprocessors=[]"));
}
- @Test
+ @Test (timeout=60000)
public void testRegionLoadWrapAroundAggregation() {
ServerLoad sl = new ServerLoad(createServerLoadProto());
long totalCount = ((long)Integer.MAX_VALUE)*2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
index e5125c6..af29146 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestServerName {
- @Test
+ @Test (timeout=60000)
public void testGetHostNameMinusDomain() {
assertEquals("2607:f0d0:1002:51::4",
ServerName.getHostNameMinusDomain("2607:f0d0:1002:51::4"));
@@ -47,7 +47,7 @@ public class TestServerName {
assertEquals("asf000.sp2.ygridcore.net,1,1", sn.toString());
}
- @Test
+ @Test (timeout=60000)
public void testShortString() {
ServerName sn = ServerName.valueOf("asf000.sp2.ygridcore.net", 1, 1);
assertEquals("asf000:1", sn.toShortString());
@@ -57,7 +57,7 @@ public class TestServerName {
assertEquals("1.1.1.1:1", sn.toShortString());
}
- @Test
+ @Test (timeout=60000)
public void testRegexPatterns() {
assertTrue(Pattern.matches(Addressing.VALID_PORT_REGEX, "123"));
assertFalse(Pattern.matches(Addressing.VALID_PORT_REGEX, ""));
@@ -81,7 +81,7 @@ public class TestServerName {
assertEquals(expecting, ServerName.parseVersionedServerName(bytes).toString());
}
- @Test
+ @Test (timeout=60000)
public void testServerName() {
ServerName sn = ServerName.valueOf("www.example.org", 1234, 5678);
ServerName sn2 = ServerName.valueOf("www.example.org", 1234, 5678);
@@ -99,7 +99,7 @@ public class TestServerName {
ServerName.SERVERNAME_SEPARATOR + "5678");
}
- @Test
+ @Test (timeout=60000)
public void getServerStartcodeFromServerName() {
ServerName sn = ServerName.valueOf("www.example.org", 1234, 5678);
assertEquals(5678,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTableDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTableDescriptor.java
index 89029b9..743144d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTableDescriptor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTableDescriptor.java
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertEquals;
public class TestTableDescriptor {
final static Log LOG = LogFactory.getLog(TestTableDescriptor.class);
- @Test
+ @Test (timeout=60000)
public void testPb() throws DeserializationException, IOException {
HTableDescriptor htd = new HTableDescriptor(TableName.META_TABLE_NAME);
final int v = 123;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index 30ad325..b3f48e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -141,7 +141,7 @@ public class TestZooKeeper {
* @throws InterruptedException
*/
// fails frequently, disabled for now, see HBASE-6406
- //@Test
+ //@Test (timeout=300000)
public void testClientSessionExpired() throws Exception {
Configuration c = new Configuration(TEST_UTIL.getConfiguration());
@@ -269,7 +269,7 @@ public class TestZooKeeper {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testMultipleZK()
throws IOException, NoSuchMethodException, InvocationTargetException, IllegalAccessException {
Table localMeta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
@@ -300,7 +300,7 @@ public class TestZooKeeper {
* Create a znode with data
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testCreateWithParents() throws Exception {
ZooKeeperWatcher zkw =
new ZooKeeperWatcher(new Configuration(TEST_UTIL.getConfiguration()),
@@ -322,7 +322,7 @@ public class TestZooKeeper {
* delete it recursively, then delete the last znode
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testZNodeDeletes() throws Exception {
ZooKeeperWatcher zkw = new ZooKeeperWatcher(
new Configuration(TEST_UTIL.getConfiguration()),
@@ -345,7 +345,7 @@ public class TestZooKeeper {
assertNull(ZKUtil.getDataNoWatch(zkw, "/l1/l2", null));
}
- @Test
+ @Test (timeout=300000)
public void testClusterKey() throws Exception {
testKey("server", "2181", "hbase");
testKey("server1,server2,server3", "2181", "hbase");
@@ -379,7 +379,7 @@ public class TestZooKeeper {
* @throws KeeperException Any of the zookeeper connections had a
* KeeperException
*/
- @Test
+ @Test (timeout=300000)
public void testCreateSilentIsReallySilent() throws InterruptedException,
KeeperException, IOException {
Configuration c = TEST_UTIL.getConfiguration();
@@ -471,7 +471,7 @@ public class TestZooKeeper {
* Test should not fail with NPE when getChildDataAndWatchForNewChildren
* invoked with wrongNode
*/
- @Test
+ @Test (timeout=300000)
@SuppressWarnings("deprecation")
public void testGetChildDataAndWatchForNewChildrenShouldNotThrowNPE()
throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index 903ce0e..d6e268d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -113,7 +113,7 @@ public class TestHFileArchiving {
}
}
- @Test
+ @Test (timeout=180000)
public void testRemovesRegionDirOnArchive() throws Exception {
TableName TABLE_NAME =
TableName.valueOf("testRemovesRegionDirOnArchive");
@@ -173,7 +173,7 @@ public class TestHFileArchiving {
* still has hidden files.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testDeleteRegionWithNoStoreFiles() throws Exception {
TableName TABLE_NAME =
TableName.valueOf("testDeleteRegionWithNoStoreFiles");
@@ -222,7 +222,7 @@ public class TestHFileArchiving {
UTIL.deleteTable(TABLE_NAME);
}
- @Test
+ @Test (timeout=180000)
public void testArchiveOnTableDelete() throws Exception {
TableName TABLE_NAME =
TableName.valueOf("testArchiveOnTableDelete");
@@ -301,7 +301,7 @@ public class TestHFileArchiving {
* Test that the store files are archived when a column family is removed.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testArchiveOnTableFamilyDelete() throws Exception {
TableName TABLE_NAME =
TableName.valueOf("testArchiveOnTableFamilyDelete");
@@ -349,7 +349,7 @@ public class TestHFileArchiving {
/**
* Test HFileArchiver.resolveAndArchive() race condition HBASE-7643
*/
- @Test
+ @Test (timeout=180000)
public void testCleaningRace() throws Exception {
final long TEST_TIME = 20 * 1000;
final ChoreService choreService = new ChoreService("TEST_SERVER_NAME");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index fd1eff7..de79202 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -1152,7 +1152,7 @@ public class TestAdmin1 {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testSplitAndMergeWithReplicaTable() throws Exception {
// The test tries to directly split replica regions and directly merge replica regions. These
// are not allowed. The test validates that. Then the test does a valid split/merge of allowed
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
index a8c4abd..46cc7d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
@@ -50,7 +50,7 @@ public class TestCheckAndMutate {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndMutate() throws Exception {
final TableName tableName = TableName.valueOf("TestPutWithDelete");
final byte[] rowKey = Bytes.toBytes("12345");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
index 072098e..1ca6d59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
@@ -90,7 +90,7 @@ public class TestClientOperationInterrupt {
}
- @Test
+ @Test (timeout=180000)
public void testInterrupt50Percent() throws IOException, InterruptedException {
final AtomicInteger noEx = new AtomicInteger(0);
final AtomicInteger badEx = new AtomicInteger(0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index 65483c9..c6c9cdc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -82,7 +82,7 @@ public class TestClientScannerRPCTimeout {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testScannerNextRPCTimesout() throws Exception {
final TableName TABLE_NAME = TableName.valueOf("testScannerNextRPCTimesout");
Table ht = TEST_UTIL.createTable(TABLE_NAME, FAMILY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
index d3986b2..c11964f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
@@ -84,7 +84,7 @@ public class TestClientTimeouts {
* doesn't throw any unexpected exceptions.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testAdminTimeout() throws Exception {
Connection lastConnection = null;
boolean lastFailed = false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
index 2cb2cfc..87baaf9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
@@ -152,7 +152,7 @@ public class TestCloneSnapshotFromClient {
admin.cloneSnapshot(snapshotName1, clonedTableName);
}
- @Test
+ @Test (timeout=300000)
public void testCloneSnapshot() throws IOException, InterruptedException {
TableName clonedTableName = TableName.valueOf("clonedtb-" + System.currentTimeMillis());
testCloneSnapshot(clonedTableName, snapshotName0, snapshot0Rows);
@@ -174,7 +174,7 @@ public class TestCloneSnapshotFromClient {
SnapshotTestingUtils.verifyReplicasCameOnline(tableName, admin, getNumReplicas());
}
- @Test
+ @Test (timeout=300000)
public void testCloneSnapshotCrossNamespace() throws IOException, InterruptedException {
String nsName = "testCloneSnapshotCrossNamespace";
admin.createNamespace(NamespaceDescriptor.create(nsName).build());
@@ -188,7 +188,7 @@ public class TestCloneSnapshotFromClient {
/**
* Verify that tables created from the snapshot are still alive after source table deletion.
*/
- @Test
+ @Test (timeout=300000)
public void testCloneLinksAfterDelete() throws IOException, InterruptedException {
// Clone a table from the first snapshot
TableName clonedTableName = TableName.valueOf("clonedtb1-" + System.currentTimeMillis());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java
index ac0a0bd..51ad887 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java
@@ -32,7 +32,7 @@ import static org.junit.Assert.assertTrue;
@Category({SmallTests.class, ClientTests.class})
public class TestConnectionUtils {
- @Test
+ @Test (timeout=60000)
public void testRetryTimeJitter() {
long[] retries = new long[200];
long baseTime = 1000000; //Larger number than reality to help test randomness.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index e2b915f..d72f6fa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -95,7 +95,7 @@ public class TestFastFail {
// Nothing to do.
}
- @Test
+ @Test (timeout=180000)
public void testFastFail() throws IOException, InterruptedException {
Admin admin = TEST_UTIL.getHBaseAdmin();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 67e33b2..825b29f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -166,7 +166,7 @@ public class TestFromClientSide {
/**
* Basic client side validation of HBASE-4536
*/
- @Test
+ @Test (timeout=300000)
public void testKeepDeletedCells() throws Exception {
final TableName TABLENAME = TableName.valueOf("testKeepDeletesCells");
final byte[] FAMILY = Bytes.toBytes("family");
@@ -233,7 +233,7 @@ public class TestFromClientSide {
/**
* Basic client side validation of HBASE-10118
*/
- @Test
+ @Test (timeout=300000)
public void testPurgeFutureDeletes() throws Exception {
final TableName TABLENAME = TableName.valueOf("testPurgeFutureDeletes");
final byte[] ROW = Bytes.toBytes("row");
@@ -290,7 +290,7 @@ public class TestFromClientSide {
* @throws Exception
*/
@Deprecated
- @Test
+ @Test (timeout=300000)
public void testSharedZooKeeper() throws Exception {
Configuration newConfig = new Configuration(TEST_UTIL.getConfiguration());
newConfig.set(HConstants.HBASE_CLIENT_INSTANCE_ID, "12345");
@@ -379,7 +379,7 @@ public class TestFromClientSide {
* Verifies that getConfiguration returns the same Configuration object used
* to create the HTable instance.
*/
- @Test
+ @Test (timeout=300000)
public void testGetConfiguration() throws Exception {
TableName TABLE = TableName.valueOf("testGetConfiguration");
byte[][] FAMILIES = new byte[][] { Bytes.toBytes("foo") };
@@ -394,7 +394,7 @@ public class TestFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testWeirdCacheBehaviour() throws Exception {
TableName TABLE = TableName.valueOf("testWeirdCacheBehaviour");
byte [][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
@@ -526,7 +526,7 @@ public class TestFromClientSide {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testFilterAcrossMultipleRegions()
throws IOException, InterruptedException {
TableName name = TableName.valueOf("testFilterAcrossMutlipleRegions");
@@ -670,7 +670,7 @@ public class TestFromClientSide {
return regions;
}
- @Test
+ @Test (timeout=300000)
public void testSuperSimple() throws Exception {
byte [] TABLE = Bytes.toBytes("testSuperSimple");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -685,7 +685,7 @@ public class TestFromClientSide {
scanner.close();
}
- @Test
+ @Test (timeout=300000)
public void testMaxKeyValueSize() throws Exception {
byte [] TABLE = Bytes.toBytes("testMaxKeyValueSize");
Configuration conf = TEST_UTIL.getConfiguration();
@@ -712,7 +712,7 @@ public class TestFromClientSide {
conf.set(TableConfiguration.MAX_KEYVALUE_SIZE_KEY, oldMaxSize);
}
- @Test
+ @Test (timeout=300000)
public void testFilters() throws Exception {
byte [] TABLE = Bytes.toBytes("testFilters");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -748,7 +748,7 @@ public class TestFromClientSide {
scanner.close();
}
- @Test
+ @Test (timeout=300000)
public void testFilterWithLongCompartor() throws Exception {
byte [] TABLE = Bytes.toBytes("testFilterWithLongCompartor");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -779,7 +779,7 @@ public class TestFromClientSide {
scanner.close();
}
- @Test
+ @Test (timeout=300000)
public void testKeyOnlyFilter() throws Exception {
byte [] TABLE = Bytes.toBytes("testKeyOnlyFilter");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -816,7 +816,7 @@ public class TestFromClientSide {
/**
* Test simple table and non-existent row cases.
*/
- @Test
+ @Test (timeout=300000)
public void testSimpleMissing() throws Exception {
byte [] TABLE = Bytes.toBytes("testSimpleMissing");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -927,7 +927,7 @@ public class TestFromClientSide {
* Test basic puts, gets, scans, and deletes for a single row
* in a multiple family table.
*/
- @Test
+ @Test (timeout=300000)
public void testSingleRowMultipleFamily() throws Exception {
byte [] TABLE = Bytes.toBytes("testSingleRowMultipleFamily");
byte [][] ROWS = makeN(ROW, 3);
@@ -1228,7 +1228,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testNull() throws Exception {
byte [] TABLE = Bytes.toBytes("testNull");
@@ -1336,7 +1336,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testVersions() throws Exception {
byte [] TABLE = Bytes.toBytes("testVersions");
@@ -1550,7 +1550,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testVersionLimits() throws Exception {
byte [] TABLE = Bytes.toBytes("testVersionLimits");
byte [][] FAMILIES = makeNAscii(FAMILY, 3);
@@ -1744,7 +1744,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testDeleteFamilyVersion() throws Exception {
HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
byte [] TABLE = Bytes.toBytes("testDeleteFamilyVersion");
@@ -1783,7 +1783,7 @@ public class TestFromClientSide {
admin.close();
}
- @Test
+ @Test (timeout=300000)
public void testDeleteFamilyVersionWithOtherDeletes() throws Exception {
byte [] TABLE = Bytes.toBytes("testDeleteFamilyVersionWithOtherDeletes");
@@ -1898,7 +1898,7 @@ public class TestFromClientSide {
admin.close();
}
- @Test
+ @Test (timeout=300000)
public void testDeletes() throws Exception {
byte [] TABLE = Bytes.toBytes("testDeletes");
@@ -2205,7 +2205,7 @@ public class TestFromClientSide {
*
* Tests one hundred families, one million columns, one million versions
*/
- @Ignore @Test
+ @Ignore @Test (timeout=300000)
public void testMillions() throws Exception {
// 100 families
@@ -2216,7 +2216,7 @@ public class TestFromClientSide {
}
- @Ignore @Test
+ @Ignore @Test (timeout=300000)
public void testMultipleRegionsAndBatchPuts() throws Exception {
// Two family table
@@ -2245,7 +2245,7 @@ public class TestFromClientSide {
}
- @Ignore @Test
+ @Ignore @Test (timeout=300000)
public void testMultipleRowMultipleFamily() throws Exception {
}
@@ -2264,7 +2264,7 @@ public class TestFromClientSide {
* To test at scale, up numColsPerRow to the millions
* (have not gotten that to work running as junit though)
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest867() throws Exception {
int numRows = 10;
int numColsPerRow = 2000;
@@ -2350,7 +2350,7 @@ public class TestFromClientSide {
* get with timestamp will return a value if there is a version with an
* earlier timestamp
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest861() throws Exception {
byte [] TABLE = Bytes.toBytes("testJiraTest861");
@@ -2414,7 +2414,7 @@ public class TestFromClientSide {
* Add a HTable get/obtainScanner method that retrieves all versions of a
* particular column and row between two timestamps
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest33() throws Exception {
byte [] TABLE = Bytes.toBytes("testJiraTest33");
@@ -2463,7 +2463,7 @@ public class TestFromClientSide {
* HBASE-1014
* commit(BatchUpdate) method should return timestamp
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest1014() throws Exception {
byte [] TABLE = Bytes.toBytes("testJiraTest1014");
@@ -2488,7 +2488,7 @@ public class TestFromClientSide {
* HBASE-1182
* Scan for columns > some timestamp
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest1182() throws Exception {
byte [] TABLE = Bytes.toBytes("testJiraTest1182");
@@ -2532,7 +2532,7 @@ public class TestFromClientSide {
* HBASE-52
* Add a means of scanning over all versions
*/
- @Test
+ @Test (timeout=300000)
public void testJiraTest52() throws Exception {
byte [] TABLE = Bytes.toBytes("testJiraTest52");
byte [][] VALUES = makeNAscii(VALUE, 7);
@@ -3362,7 +3362,7 @@ public class TestFromClientSide {
return Bytes.equals(left, right);
}
- @Test
+ @Test (timeout=300000)
public void testDuplicateVersions() throws Exception {
byte [] TABLE = Bytes.toBytes("testDuplicateVersions");
@@ -3576,7 +3576,7 @@ public class TestFromClientSide {
0, 9);
}
- @Test
+ @Test (timeout=300000)
public void testUpdates() throws Exception {
byte [] TABLE = Bytes.toBytes("testUpdates");
@@ -3626,7 +3626,7 @@ public class TestFromClientSide {
assertEquals("DDD", Bytes.toString(navigableMap.get(2L)));
}
- @Test
+ @Test (timeout=300000)
public void testUpdatesWithMajorCompaction() throws Exception {
TableName TABLE = TableName.valueOf("testUpdatesWithMajorCompaction");
@@ -3687,7 +3687,7 @@ public class TestFromClientSide {
assertEquals("DDD", Bytes.toString(navigableMap.get(2L)));
}
- @Test
+ @Test (timeout=300000)
public void testMajorCompactionBetweenTwoUpdates() throws Exception {
String tableName = "testMajorCompactionBetweenTwoUpdates";
@@ -3755,7 +3755,7 @@ public class TestFromClientSide {
assertEquals("DDD", Bytes.toString(navigableMap.get(2L)));
}
- @Test
+ @Test (timeout=300000)
public void testGet_EmptyTable() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_EmptyTable"), FAMILY);
Get get = new Get(ROW);
@@ -3764,7 +3764,7 @@ public class TestFromClientSide {
assertTrue(r.isEmpty());
}
- @Test
+ @Test (timeout=300000)
public void testGet_NullQualifier() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NullQualifier"), FAMILY);
Put put = new Put(ROW);
@@ -3787,7 +3787,7 @@ public class TestFromClientSide {
assertEquals(2, r.size());
}
- @Test
+ @Test (timeout=300000)
public void testGet_NonExistentRow() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NonExistentRow"), FAMILY);
Put put = new Put(ROW);
@@ -3809,7 +3809,7 @@ public class TestFromClientSide {
LOG.info("Row missing as it should be");
}
- @Test
+ @Test (timeout=300000)
public void testPut() throws IOException {
final byte [] CONTENTS_FAMILY = Bytes.toBytes("contents");
final byte [] SMALL_FAMILY = Bytes.toBytes("smallfam");
@@ -3849,7 +3849,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testPutNoCF() throws IOException {
final byte[] BAD_FAM = Bytes.toBytes("BAD_CF");
final byte[] VAL = Bytes.toBytes(100);
@@ -3868,7 +3868,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testRowsPut() throws IOException {
final byte[] CONTENTS_FAMILY = Bytes.toBytes("contents");
final byte[] SMALL_FAMILY = Bytes.toBytes("smallfam");
@@ -3895,7 +3895,7 @@ public class TestFromClientSide {
assertEquals(NB_BATCH_ROWS, nbRows);
}
- @Test
+ @Test (timeout=300000)
public void testRowsPutBufferedOneFlush() throws IOException {
final byte [] CONTENTS_FAMILY = Bytes.toBytes("contents");
final byte [] SMALL_FAMILY = Bytes.toBytes("smallfam");
@@ -3937,7 +3937,7 @@ public class TestFromClientSide {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testRowsPutBufferedManyManyFlushes() throws IOException {
final byte[] CONTENTS_FAMILY = Bytes.toBytes("contents");
final byte[] SMALL_FAMILY = Bytes.toBytes("smallfam");
@@ -3966,7 +3966,7 @@ public class TestFromClientSide {
assertEquals(NB_BATCH_ROWS * 10, nbRows);
}
- @Test
+ @Test (timeout=300000)
public void testAddKeyValue() throws IOException {
final byte[] CONTENTS_FAMILY = Bytes.toBytes("contents");
final byte[] value = Bytes.toBytes("abcd");
@@ -4000,7 +4000,7 @@ public class TestFromClientSide {
* test for HBASE-737
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testHBase737 () throws IOException {
final byte [] FAM1 = Bytes.toBytes("fam1");
final byte [] FAM2 = Bytes.toBytes("fam2");
@@ -4091,7 +4091,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testListTables() throws IOException, InterruptedException {
TableName t1 = TableName.valueOf("testListTables1");
TableName t2 = TableName.valueOf("testListTables2");
@@ -4137,7 +4137,7 @@ public class TestFromClientSide {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testUnmanagedHConnection() throws IOException {
final TableName tableName = TableName.valueOf("testUnmanagedHConnection");
HTable t = createUnmangedHConnectionHTable(tableName);
@@ -4153,7 +4153,7 @@ public class TestFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testUnmanagedHConnectionReconnect() throws Exception {
final TableName tableName = TableName.valueOf("testUnmanagedHConnectionReconnect");
HTable t = createUnmangedHConnectionHTable(tableName);
@@ -4180,7 +4180,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testMiscHTableStuff() throws IOException {
final TableName tableAname = TableName.valueOf("testMiscHTableStuffA");
final TableName tableBname = TableName.valueOf("testMiscHTableStuffB");
@@ -4257,7 +4257,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testGetClosestRowBefore() throws IOException, InterruptedException {
final TableName tableAname = TableName.valueOf("testGetClosestRowBefore");
final byte[] firstRow = Bytes.toBytes("row111");
@@ -4356,7 +4356,7 @@ public class TestFromClientSide {
* For HBASE-2156
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testScanVariableReuse() throws Exception {
Scan scan = new Scan();
scan.addFamily(FAMILY);
@@ -4371,7 +4371,7 @@ public class TestFromClientSide {
assertTrue(scan.getFamilyMap().containsKey(FAMILY));
}
- @Test
+ @Test (timeout=300000)
public void testMultiRowMutation() throws Exception {
LOG.info("Starting testMultiRowMutation");
final TableName TABLENAME = TableName.valueOf("testMultiRowMutation");
@@ -4402,7 +4402,7 @@ public class TestFromClientSide {
assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIER)));
}
- @Test
+ @Test (timeout=300000)
public void testRowMutation() throws Exception {
LOG.info("Starting testRowMutation");
final TableName TABLENAME = TableName.valueOf("testRowMutation");
@@ -4434,7 +4434,7 @@ public class TestFromClientSide {
assertNull(r.getValue(FAMILY, QUALIFIERS[0]));
}
- @Test
+ @Test (timeout=300000)
public void testAppend() throws Exception {
LOG.info("Starting testAppend");
final TableName TABLENAME = TableName.valueOf("testAppend");
@@ -4463,7 +4463,7 @@ public class TestFromClientSide {
r.getColumnLatestCell(FAMILY, QUALIFIERS[2]).getTimestamp());
}
- @Test
+ @Test (timeout=300000)
public void testIncrementWithDeletes() throws Exception {
LOG.info("Starting testIncrementWithDeletes");
final TableName TABLENAME =
@@ -4485,7 +4485,7 @@ public class TestFromClientSide {
assertEquals(5, Bytes.toLong(r.getValue(FAMILY, COLUMN)));
}
- @Test
+ @Test (timeout=300000)
public void testIncrementingInvalidValue() throws Exception {
LOG.info("Starting testIncrementingInvalidValue");
final TableName TABLENAME = TableName.valueOf("testIncrementingInvalidValue");
@@ -4511,7 +4511,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testIncrementInvalidArguments() throws Exception {
LOG.info("Starting testIncrementInvalidArguments");
final TableName TABLENAME = TableName.valueOf("testIncrementInvalidArguments");
@@ -4566,7 +4566,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
public void testIncrementOutOfOrder() throws Exception {
LOG.info("Starting testIncrementOutOfOrder");
final TableName TABLENAME = TableName.valueOf("testIncrementOutOfOrder");
@@ -4606,7 +4606,7 @@ public class TestFromClientSide {
assertIncrementKey(kvs[2], ROW, FAMILY, QUALIFIERS[2], 2);
}
- @Test
+ @Test (timeout=300000)
public void testIncrementOnSameColumn() throws Exception {
LOG.info("Starting testIncrementOnSameColumn");
final byte[] TABLENAME = Bytes.toBytes("testIncrementOnSameColumn");
@@ -4649,7 +4649,7 @@ public class TestFromClientSide {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testIncrement() throws Exception {
LOG.info("Starting testIncrement");
final TableName TABLENAME = TableName.valueOf("testIncrement");
@@ -4721,7 +4721,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testClientPoolRoundRobin() throws IOException {
final TableName tableName = TableName.valueOf("testClientPoolRoundRobin");
@@ -4757,7 +4757,7 @@ public class TestFromClientSide {
}
}
- @Ignore ("Flakey: HBASE-8989") @Test
+ @Ignore ("Flakey: HBASE-8989") @Test (timeout=300000)
public void testClientPoolThreadLocal() throws IOException {
final TableName tableName = TableName.valueOf("testClientPoolThreadLocal");
@@ -4840,7 +4840,7 @@ public class TestFromClientSide {
assertNull(error.get());
}
- @Test
+ @Test (timeout=300000)
public void testCheckAndPut() throws IOException {
final byte [] anotherrow = Bytes.toBytes("anotherrow");
final byte [] value2 = Bytes.toBytes("abcd");
@@ -4879,7 +4879,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testCheckAndPutWithCompareOp() throws IOException {
final byte [] value1 = Bytes.toBytes("aaaa");
final byte [] value2 = Bytes.toBytes("bbbb");
@@ -4944,7 +4944,7 @@ public class TestFromClientSide {
assertEquals(ok, true);
}
- @Test
+ @Test (timeout=300000)
public void testCheckAndDeleteWithCompareOp() throws IOException {
final byte [] value1 = Bytes.toBytes("aaaa");
final byte [] value2 = Bytes.toBytes("bbbb");
@@ -5022,7 +5022,7 @@ public class TestFromClientSide {
* Test ScanMetrics
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
@SuppressWarnings ("unused")
public void testScanMetrics() throws Exception {
TableName TABLENAME = TableName.valueOf("testScanMetrics");
@@ -5128,7 +5128,7 @@ public class TestFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testCacheOnWriteEvictOnClose() throws Exception {
TableName tableName = TableName.valueOf("testCOWEOCfromClient");
byte [] data = Bytes.toBytes("data");
@@ -5237,7 +5237,7 @@ public class TestFromClientSide {
assertEquals(count, store.getStorefilesCount());
}
- @Test
+ @Test (timeout=300000)
/**
* Tests the non cached version of getRegionLocator by moving a region.
*/
@@ -5283,7 +5283,7 @@ public class TestFromClientSide {
}
}
- @Test
+ @Test (timeout=300000)
/**
* Tests getRegionsInRange by creating some regions over which a range of
* keys spans; then changing the key range.
@@ -5344,7 +5344,7 @@ public class TestFromClientSide {
assertEquals(1, regionsList.size());
}
- @Test
+ @Test (timeout=300000)
public void testJira6912() throws Exception {
TableName TABLE = TableName.valueOf("testJira6912");
Table foo = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10);
@@ -5370,7 +5370,7 @@ public class TestFromClientSide {
assertEquals(1, bar.length);
}
- @Test
+ @Test (timeout=300000)
public void testScan_NullQualifier() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testScan_NullQualifier"), FAMILY);
Put put = new Put(ROW);
@@ -5399,7 +5399,7 @@ public class TestFromClientSide {
assertEquals(2, bar[0].size());
}
- @Test
+ @Test (timeout=300000)
public void testNegativeTimestamp() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testNegativeTimestamp"), FAMILY);
@@ -5458,7 +5458,7 @@ public class TestFromClientSide {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testIllegalTableDescriptor() throws Exception {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testIllegalTableDescriptor"));
HColumnDescriptor hcd = new HColumnDescriptor(FAMILY);
@@ -5543,7 +5543,7 @@ public class TestFromClientSide {
assertFalse(admin.tableExists(htd.getTableName()));
}
- @Test
+ @Test (timeout=300000)
public void testRawScanRespectsVersions() throws Exception {
TableName TABLE = TableName.valueOf("testRawScan");
Table table = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5613,7 +5613,7 @@ public class TestFromClientSide {
TEST_UTIL.deleteTable(TABLE);
}
- @Test
+ @Test (timeout=300000)
public void testSmallScan() throws Exception {
// Test Initialization.
TableName TABLE = TableName.valueOf("testSmallScan");
@@ -5650,7 +5650,7 @@ public class TestFromClientSide {
}
- @Test
+ @Test (timeout=300000)
public void testSuperSimpleWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testSuperSimpleWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5695,7 +5695,7 @@ public class TestFromClientSide {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testFiltersWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testFiltersWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5735,7 +5735,7 @@ public class TestFromClientSide {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testKeyOnlyFilterWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testKeyOnlyFilterWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5776,7 +5776,7 @@ public class TestFromClientSide {
/**
* Test simple table and non-existent row cases.
*/
- @Test
+ @Test (timeout=300000)
public void testSimpleMissingWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testSimpleMissingWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5841,7 +5841,7 @@ public class TestFromClientSide {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testNullWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testNullWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
@@ -5878,7 +5878,7 @@ public class TestFromClientSide {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testDeletesWithReverseScan() throws Exception {
TableName TABLE = TableName.valueOf("testDeletesWithReverseScan");
byte[][] ROWS = makeNAscii(ROW, 6);
@@ -6063,7 +6063,7 @@ public class TestFromClientSide {
/**
* Tests reversed scan under multi regions
*/
- @Test
+ @Test (timeout=300000)
public void testReversedScanUnderMultiRegions() throws Exception {
// Test Initialization.
TableName TABLE = TableName.valueOf("testReversedScanUnderMultiRegions");
@@ -6120,7 +6120,7 @@ public class TestFromClientSide {
/**
* Tests reversed scan under multi regions
*/
- @Test
+ @Test (timeout=300000)
public void testSmallReversedScanUnderMultiRegions() throws Exception {
// Test Initialization.
TableName TABLE = TableName.valueOf("testSmallReversedScanUnderMultiRegions");
@@ -6282,7 +6282,7 @@ public class TestFromClientSide {
assertEquals(4, count); // 003 004 005 006
}
- @Test
+ @Test (timeout=300000)
public void testGetStartEndKeysWithRegionReplicas() throws IOException {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testGetStartEndKeys"));
HColumnDescriptor fam = new HColumnDescriptor(FAMILY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 680dcfb..2df9bb8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -266,7 +266,7 @@ public class TestFromClientSide3 {
"hbase.hstore.compaction.min"));
}
- @Test
+ @Test (timeout=300000)
public void testHTableBatchWithEmptyPut() throws Exception {
Table table = TEST_UTIL.createTable(
Bytes.toBytes("testHTableBatchWithEmptyPut"), new byte[][] { FAMILY });
@@ -290,7 +290,7 @@ public class TestFromClientSide3 {
}
}
- @Test
+ @Test (timeout=300000)
public void testHTableExistsMethodSingleRegionSingleGet() throws Exception {
// Test with a single region table.
@@ -332,7 +332,7 @@ public class TestFromClientSide3 {
assertEquals(results[2], false);
}
- @Test
+ @Test (timeout=300000)
public void testHTableExistsMethodMultipleRegionsSingleGet() throws Exception {
Table table = TEST_UTIL.createTable(
@@ -352,7 +352,7 @@ public class TestFromClientSide3 {
assertEquals(exist, true);
}
- @Test
+ @Test (timeout=300000)
public void testHTableExistsMethodMultipleRegionsMultipleGets() throws Exception {
HTable table = TEST_UTIL.createTable(
TableName.valueOf("testHTableExistsMethodMultipleRegionsMultipleGets"),
@@ -401,7 +401,7 @@ public class TestFromClientSide3 {
assertEquals(results[2], false);
}
- @Test
+ @Test (timeout=300000)
public void testGetEmptyRow() throws Exception {
//Create a table and put in 1 row
Admin admin = TEST_UTIL.getHBaseAdmin();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
index f5807c2..d47975b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
@@ -59,7 +59,7 @@ public class TestFromClientSideNoCodec {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testBasics() throws IOException {
final byte [] t = Bytes.toBytes("testBasics");
final byte [][] fs = new byte[][] {Bytes.toBytes("cf1"), Bytes.toBytes("cf2"),
@@ -93,7 +93,7 @@ public class TestFromClientSideNoCodec {
assertTrue(count == 1);
}
- @Test
+ @Test (timeout=180000)
public void testNoCodec() {
Configuration c = new Configuration();
c.set("hbase.client.default.rpc.codec", "");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
index fbca881..e457fd1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
@@ -67,7 +67,7 @@ public class TestHBaseAdminNoCluster {
* @throws MasterNotRunningException
* @throws ServiceException
*/
- @Test
+ @Test (timeout=60000)
public void testMasterMonitorCallableRetries()
throws MasterNotRunningException, ZooKeeperConnectionException, IOException, ServiceException {
Configuration configuration = HBaseConfiguration.create();
@@ -105,7 +105,7 @@ public class TestHBaseAdminNoCluster {
}
}
- @Test
+ @Test (timeout=60000)
public void testMasterOperationsRetries() throws Exception {
// Admin.listTables()
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index 219496f..367e097 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -148,7 +148,7 @@ public class TestHCM {
return HConnectionTestingUtility.getConnectionCount();
}
- @Test
+ @Test (timeout=180000)
public void testClusterConnection() throws IOException {
ThreadPoolExecutor otherPool = new ThreadPoolExecutor(1, 1,
5, TimeUnit.SECONDS,
@@ -211,7 +211,7 @@ public class TestHCM {
* Naive test to check that HConnection#getAdmin returns a properly constructed HBaseAdmin object
* @throws IOException Unable to construct admin
*/
- @Test
+ @Test (timeout=180000)
public void testAdminFactory() throws IOException {
Connection con1 = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
Admin admin = con1.getAdmin();
@@ -287,12 +287,12 @@ public class TestHCM {
* Test that we can handle connection close: it will trigger a retry, but the calls will
* finish.
*/
- @Test
+ @Test (timeout=180000)
public void testConnectionCloseAllowsInterrupt() throws Exception {
testConnectionClose(true);
}
- @Test
+ @Test (timeout=180000)
public void testConnectionNotAllowsInterrupt() throws Exception {
testConnectionClose(false);
}
@@ -305,7 +305,7 @@ public class TestHCM {
* succeeds. But the client won't wait that much, because 20 + 20 > 30, so the client
* timeouted when the server answers.
*/
- @Test
+ @Test (timeout=180000)
public void testOperationTimeout() throws Exception {
HTableDescriptor hdt = TEST_UTIL.createTableDescriptor("HCM-testOperationTimeout");
hdt.addCoprocessor(SleepAndFailFirstTime.class.getName());
@@ -416,7 +416,7 @@ public class TestHCM {
/**
* Test that connection can become idle without breaking everything.
*/
- @Test
+ @Test (timeout=180000)
public void testConnectionIdle() throws Exception {
TableName tableName = TableName.valueOf("HCM-testConnectionIdle");
TEST_UTIL.createTable(tableName, FAM_NAM).close();
@@ -477,7 +477,7 @@ public class TestHCM {
* notification.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testConnectionCut() throws Exception {
if (!isJavaOk){
// This test requires jdk 1.7+
@@ -570,7 +570,7 @@ public class TestHCM {
}
}
- @Test
+ @Test (timeout=180000)
public void abortingHConnectionRemovesItselfFromHCM() throws Exception {
// Save off current HConnections
Map oldHBaseInstances =
@@ -597,7 +597,7 @@ public class TestHCM {
* that we really delete it.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRegionCaching() throws Exception{
TEST_UTIL.createMultiRegionTable(TABLE_NAME, FAM_NAM).close();
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
@@ -785,7 +785,7 @@ public class TestHCM {
* Test that Connection or Pool are not closed when managed externally
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testConnectionManagement() throws Exception{
Table table0 = TEST_UTIL.createTable(TABLE_NAME1, FAM_NAM);
Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
@@ -852,7 +852,7 @@ public class TestHCM {
* Make sure that {@link Configuration} instances that are essentially the
* same map to the same {@link HConnection} instance.
*/
- @Test
+ @Test (timeout=180000)
public void testConnectionSameness() throws Exception {
Connection previousConnection = null;
for (int i = 0; i < 2; i++) {
@@ -884,7 +884,7 @@ public class TestHCM {
* @deprecated Tests deprecated functionality. Remove in 1.0.
*/
@Deprecated
- @Test
+ @Test (timeout=180000)
public void testConnectionUniqueness() throws Exception {
int zkmaxconnections = TEST_UTIL.getConfiguration().
getInt(HConstants.ZOOKEEPER_MAX_CLIENT_CNXNS,
@@ -930,7 +930,7 @@ public class TestHCM {
}
}
- @Test
+ @Test (timeout=180000)
public void testClosing() throws Exception {
Configuration configuration =
new Configuration(TEST_UTIL.getConfiguration());
@@ -970,7 +970,7 @@ public class TestHCM {
* Trivial test to verify that nobody messes with
* {@link HConnectionManager#createConnection(Configuration)}
*/
- @Test
+ @Test (timeout=180000)
public void testCreateConnection() throws Exception {
Configuration configuration = TEST_UTIL.getConfiguration();
Connection c1 = ConnectionFactory.createConnection(configuration);
@@ -1129,7 +1129,7 @@ public class TestHCM {
}
}
- @Ignore ("Test presumes RETRY_BACKOFF will never change; it has") @Test
+ @Ignore ("Test presumes RETRY_BACKOFF will never change; it has") @Test (timeout=180000)
public void testErrorBackoffTimeCalculation() throws Exception {
// TODO: This test would seem to presume hardcoded RETRY_BACKOFF which it should not.
final long ANY_PAUSE = 100;
@@ -1205,7 +1205,7 @@ public class TestHCM {
* zk connections.
* @throws Exception
*/
- @Ignore ("Flakey test: See HBASE-8996")@Test
+ @Ignore ("Flakey test: See HBASE-8996")@Test (timeout=180000)
public void testDeleteForZKConnLeak() throws Exception {
TEST_UTIL.createTable(TABLE_NAME4, FAM_NAM);
final Configuration config = HBaseConfiguration.create(TEST_UTIL.getConfiguration());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
index 26fe485..fc05707 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
@@ -81,7 +81,7 @@ public class TestHTableMultiplexer {
Bytes.toStringBinary(r.getValue(FAMILY, QUALIFIER)));
}
- @Test
+ @Test (timeout=300000)
public void testHTableMultiplexer() throws Exception {
TableName TABLE_1 = TableName.valueOf("testHTableMultiplexer_1");
TableName TABLE_2 = TableName.valueOf("testHTableMultiplexer_2");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
index 2898369..a4288c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
@@ -81,7 +81,7 @@ public class TestHTableMultiplexerFlushCache {
Bytes.toStringBinary(r.getValue(family, quality)));
}
- @Test
+ @Test (timeout=300000)
public void testOnRegionChange() throws Exception {
TableName TABLE = TableName.valueOf("testOnRegionChange");
final int NUM_REGIONS = 10;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java
index 1f6dc98..f383e83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIntraRowPagination.java
@@ -47,7 +47,7 @@ public class TestIntraRowPagination {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testScanLimitAndOffset() throws Exception {
//byte [] TABLE = HTestConst.DEFAULT_TABLE_BYTES;
byte [][] ROWS = HTestConst.makeNAscii(HTestConst.DEFAULT_ROW_BYTES, 2);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
index e195baf..a7607ec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
@@ -65,7 +65,7 @@ public class TestMetaScanner {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testMetaScanner() throws Exception {
LOG.info("Starting testMetaScanner");
@@ -110,7 +110,7 @@ public class TestMetaScanner {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testConcurrentMetaScannerAndCatalogJanitor() throws Throwable {
/* TEST PLAN: start with only one region in a table. Have a splitter
* thread and metascanner threads that continously scan the meta table for regions.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
index b83dc81..a13f437 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
@@ -104,13 +104,13 @@ public class TestMetaWithReplicas {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testMetaHTDReplicaCount() throws Exception {
assertTrue(TEST_UTIL.getHBaseAdmin().getTableDescriptor(TableName.META_TABLE_NAME)
.getRegionReplication() == 3);
}
- @Test
+ @Test (timeout=180000)
public void testZookeeperNodesForReplicas() throws Exception {
// Checks all the znodes exist when meta's replicas are enabled
ZooKeeperWatcher zkw = TEST_UTIL.getZooKeeperWatcher();
@@ -133,7 +133,7 @@ public class TestMetaWithReplicas {
}
}
- @Test
+ @Test (timeout=180000)
public void testShutdownHandling() throws Exception {
// This test creates a table, flushes the meta (with 3 replicas), kills the
// server holding the primary meta replica. Then it does a put/get into/from
@@ -223,7 +223,7 @@ public class TestMetaWithReplicas {
assertTrue(Arrays.equals(r.getRow(), row));
}
- @Test
+ @Test (timeout=180000)
public void testMetaLookupThreadPoolCreated() throws Exception {
byte[] TABLE = Bytes.toBytes("testMetaLookupThreadPoolCreated");
byte[][] FAMILIES = new byte[][] { Bytes.toBytes("foo") };
@@ -240,7 +240,7 @@ public class TestMetaWithReplicas {
assert(ex != null);
}
- @Test
+ @Test (timeout=180000)
public void testChangingReplicaCount() throws Exception {
// tests changing the replica count across master restarts
// reduce the replica count from 3 to 2
@@ -309,13 +309,13 @@ public class TestMetaWithReplicas {
+ "(" + metaZnodes.toString() + ")";
}
- @Test
+ @Test (timeout=180000)
public void testHBaseFsckWithMetaReplicas() throws Exception {
HBaseFsck hbck = HbckTestingUtil.doFsck(TEST_UTIL.getConfiguration(), false);
HbckTestingUtil.assertNoErrors(hbck);
}
- @Test
+ @Test (timeout=180000)
public void testHBaseFsckWithFewerMetaReplicas() throws Exception {
ClusterConnection c = (ClusterConnection)ConnectionFactory.createConnection(
TEST_UTIL.getConfiguration());
@@ -333,7 +333,7 @@ public class TestMetaWithReplicas {
assertErrors(hbck, new ERROR_CODE[]{});
}
- @Test
+ @Test (timeout=180000)
public void testHBaseFsckWithFewerMetaReplicaZnodes() throws Exception {
ClusterConnection c = (ClusterConnection)ConnectionFactory.createConnection(
TEST_UTIL.getConfiguration());
@@ -353,7 +353,7 @@ public class TestMetaWithReplicas {
assertErrors(hbck, new ERROR_CODE[]{});
}
- @Test
+ @Test (timeout=180000)
public void testAccessingUnknownTables() throws Exception {
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
conf.setBoolean(HConstants.USE_META_REPLICAS, true);
@@ -367,7 +367,7 @@ public class TestMetaWithReplicas {
fail("Expected TableNotFoundException");
}
- @Test
+ @Test (timeout=180000)
public void testMetaAddressChange() throws Exception {
// checks that even when the meta's location changes, the various
// caches update themselves. Uses the master operations to test
@@ -406,7 +406,7 @@ public class TestMetaWithReplicas {
assertTrue(TEST_UTIL.getHBaseAdmin().isTableDisabled("randomTable5678"));
}
- @Test
+ @Test (timeout=180000)
public void testShutdownOfReplicaHolder() throws Exception {
// checks that the when the server holding meta replica is shut down, the meta replica
// can be recovered
@@ -427,7 +427,7 @@ public class TestMetaWithReplicas {
assertTrue(i != 3);
}
- @Test
+ @Test (timeout=180000)
public void testHBaseFsckWithExcessMetaReplicas() throws Exception {
HBaseFsck hbck = new HBaseFsck(TEST_UTIL.getConfiguration());
// Create a meta replica (this will be the 4th one) and assign it
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index abea699..45a36f6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -205,7 +205,7 @@ public class TestMultiParallel {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testBadFam() throws Exception {
LOG.info("test=testBadFam");
Table table = UTIL.getConnection().getTable(TEST_TABLE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index abb919f..c3f154f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -80,7 +80,7 @@ public class TestMultipleTimestamps {
// Nothing to do.
}
- @Test
+ @Test (timeout=300000)
public void testReseeksWithOneColumnMiltipleTimestamp() throws IOException {
TableName TABLE =
TableName.valueOf("testReseeksWithOne" +
@@ -121,7 +121,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testReseeksWithMultipleColumnOneTimestamp() throws IOException {
LOG.info("testReseeksWithMultipleColumnOneTimestamp");
TableName TABLE =
@@ -161,7 +161,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testReseeksWithMultipleColumnMultipleTimestamp() throws
IOException {
LOG.info("testReseeksWithMultipleColumnMultipleTimestamp");
@@ -216,7 +216,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testReseeksWithMultipleFiles() throws IOException {
LOG.info("testReseeksWithMultipleFiles");
TableName TABLE =
@@ -276,7 +276,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testWithVersionDeletes() throws Exception {
// first test from memstore (without flushing).
@@ -319,7 +319,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testWithMultipleVersionDeletes() throws IOException {
LOG.info("testWithMultipleVersionDeletes");
@@ -347,7 +347,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testWithColumnDeletes() throws IOException {
TableName TABLE =
TableName.valueOf("testWithColumnDeletes");
@@ -373,7 +373,7 @@ public class TestMultipleTimestamps {
ht.close();
}
- @Test
+ @Test (timeout=300000)
public void testWithFamilyDeletes() throws IOException {
TableName TABLE =
TableName.valueOf("testWithFamilyDeletes");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index c46056d..6d52307 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -43,7 +43,7 @@ public class TestPutDeleteEtcCellIteration {
private static final long TIMESTAMP = System.currentTimeMillis();
private static final int COUNT = 10;
- @Test
+ @Test (timeout=60000)
public void testPutIteration() throws IOException {
Put p = new Put(ROW);
for (int i = 0; i < COUNT; i++) {
@@ -78,7 +78,7 @@ public class TestPutDeleteEtcCellIteration {
assertEquals(COUNT, index);
}
- @Test
+ @Test (timeout=60000)
public void testDeleteIteration() throws IOException {
Delete d = new Delete(ROW);
for (int i = 0; i < COUNT; i++) {
@@ -94,7 +94,7 @@ public class TestPutDeleteEtcCellIteration {
assertEquals(COUNT, index);
}
- @Test
+ @Test (timeout=60000)
public void testAppendIteration() throws IOException {
Append a = new Append(ROW);
for (int i = 0; i < COUNT; i++) {
@@ -112,7 +112,7 @@ public class TestPutDeleteEtcCellIteration {
assertEquals(COUNT, index);
}
- @Test
+ @Test (timeout=60000)
public void testIncrementIteration() throws IOException {
Increment increment = new Increment(ROW);
for (int i = 0; i < COUNT; i++) {
@@ -132,7 +132,7 @@ public class TestPutDeleteEtcCellIteration {
assertEquals(COUNT, index);
}
- @Test
+ @Test (timeout=60000)
public void testResultIteration() throws IOException {
Cell [] cells = new Cell[COUNT];
for(int i = 0; i < COUNT; i++) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
index 0e819bb..e2ed93c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
@@ -49,7 +49,7 @@ public class TestPutWithDelete {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testHbasePutDeleteCell() throws Exception {
final TableName tableName = TableName.valueOf("TestPutWithDelete");
final byte[] rowKey = Bytes.toBytes("12345");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index efc8db2..646e117 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -283,7 +283,7 @@ public class TestReplicasClient {
TestRegionServerNoMaster.flushRegion(HTU, regionInfo);
}
- @Test
+ @Test (timeout=180000)
public void testUseRegionWithoutReplica() throws Exception {
byte[] b1 = "testUseRegionWithoutReplica".getBytes();
openRegion(hriSecondary);
@@ -297,7 +297,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testLocations() throws Exception {
byte[] b1 = "testLocations".getBytes();
openRegion(hriSecondary);
@@ -322,7 +322,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetNoResultNoStaleRegionWithReplica() throws Exception {
byte[] b1 = "testGetNoResultNoStaleRegionWithReplica".getBytes();
openRegion(hriSecondary);
@@ -338,7 +338,7 @@ public class TestReplicasClient {
}
- @Test
+ @Test (timeout=180000)
public void testGetNoResultStaleRegionWithReplica() throws Exception {
byte[] b1 = "testGetNoResultStaleRegionWithReplica".getBytes();
openRegion(hriSecondary);
@@ -355,7 +355,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetNoResultNotStaleSleepRegionWithReplica() throws Exception {
byte[] b1 = "testGetNoResultNotStaleSleepRegionWithReplica".getBytes();
openRegion(hriSecondary);
@@ -374,7 +374,7 @@ public class TestReplicasClient {
}
- @Test
+ @Test (timeout=180000)
public void testFlushTable() throws Exception {
openRegion(hriSecondary);
try {
@@ -394,7 +394,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testFlushPrimary() throws Exception {
openRegion(hriSecondary);
@@ -413,7 +413,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testFlushSecondary() throws Exception {
openRegion(hriSecondary);
try {
@@ -432,7 +432,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testUseRegionWithReplica() throws Exception {
byte[] b1 = "testUseRegionWithReplica".getBytes();
openRegion(hriSecondary);
@@ -525,7 +525,7 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testCancelOfMultiGet() throws Exception {
openRegion(hriSecondary);
try {
@@ -590,25 +590,25 @@ public class TestReplicasClient {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanWithReplicas() throws Exception {
//simple scan
runMultipleScansOfOneType(false, false);
}
- @Test
+ @Test (timeout=180000)
public void testSmallScanWithReplicas() throws Exception {
//small scan
runMultipleScansOfOneType(false, true);
}
- @Test
+ @Test (timeout=180000)
public void testReverseScanWithReplicas() throws Exception {
//reverse scan
runMultipleScansOfOneType(true, false);
}
- @Test
+ @Test (timeout=180000)
public void testCancelOfScan() throws Exception {
openRegion(hriSecondary);
int NUMROWS = 100;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
index c5e6449..65c8aee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
@@ -137,7 +137,7 @@ public class TestRestoreSnapshotFromClient {
SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
}
- @Test
+ @Test (timeout=300000)
public void testRestoreSnapshot() throws IOException {
SnapshotTestingUtils.verifyRowCount(TEST_UTIL, tableName, snapshot1Rows);
admin.disableTable(tableName);
@@ -173,7 +173,7 @@ public class TestRestoreSnapshotFromClient {
return 1;
}
- @Test
+ @Test (timeout=300000)
public void testRestoreSchemaChange() throws Exception {
byte[] TEST_FAMILY2 = Bytes.toBytes("cf2");
@@ -227,7 +227,7 @@ public class TestRestoreSnapshotFromClient {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testCloneSnapshotOfCloned() throws IOException, InterruptedException {
TableName clonedTableName =
TableName.valueOf("clonedtb-" + System.currentTimeMillis());
@@ -245,7 +245,7 @@ public class TestRestoreSnapshotFromClient {
TEST_UTIL.deleteTable(clonedTableName);
}
- @Test
+ @Test (timeout=300000)
public void testCloneAndRestoreSnapshot() throws IOException, InterruptedException {
TEST_UTIL.deleteTable(tableName);
waitCleanerRun();
@@ -262,7 +262,7 @@ public class TestRestoreSnapshotFromClient {
SnapshotTestingUtils.verifyReplicasCameOnline(tableName, admin, getNumReplicas());
}
- @Test
+ @Test (timeout=300000)
public void testCorruptedSnapshot() throws IOException, InterruptedException {
SnapshotTestingUtils.corruptSnapshot(TEST_UTIL, Bytes.toString(snapshotName0));
TableName cloneName = TableName.valueOf("corruptedClone-" + System.currentTimeMillis());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index dcf26f2..3b74d12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -118,7 +118,7 @@ public class TestRpcControllerFactory {
* won't be sure to add them here. So we just can cover the major ones.
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=180000)
public void testCountController() throws Exception {
Configuration conf = new Configuration(UTIL.getConfiguration());
// setup our custom controller
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index a6c1cfe..105ce30 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -100,7 +100,7 @@ public class TestScannersFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testScanBatch() throws Exception {
TableName TABLE = TableName.valueOf("testScanBatch");
byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 8);
@@ -175,7 +175,7 @@ public class TestScannersFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testGetMaxResults() throws Exception {
byte [] TABLE = Bytes.toBytes("testGetMaxResults");
byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
@@ -295,7 +295,7 @@ public class TestScannersFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testScanMaxResults() throws Exception {
byte [] TABLE = Bytes.toBytes("testScanLimit");
byte [][] ROWS = HTestConst.makeNAscii(ROW, 2);
@@ -345,7 +345,7 @@ public class TestScannersFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testGetRowOffset() throws Exception {
byte [] TABLE = Bytes.toBytes("testGetRowOffset");
byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
@@ -441,7 +441,7 @@ public class TestScannersFromClientSide {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testScanOnReopenedRegion() throws Exception {
TableName TABLE = TableName.valueOf("testScanOnReopenedRegion");
byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 2);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
index 0f0baff..6fa3d22 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java
@@ -110,17 +110,17 @@ public class TestTableSnapshotScanner {
table.close();
}
- @Test
+ @Test (timeout=300000)
public void testWithSingleRegion() throws Exception {
testScanner(UTIL, "testWithSingleRegion", 1, false);
}
- @Test
+ @Test (timeout=300000)
public void testWithMultiRegion() throws Exception {
testScanner(UTIL, "testWithMultiRegion", 10, false);
}
- @Test
+ @Test (timeout=300000)
public void testWithOfflineHBaseMultiRegion() throws Exception {
testScanner(UTIL, "testWithMultiRegion", 20, true);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 4843715..596f477 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -90,7 +90,7 @@ public class TestTimestampsFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testTimestampsFilter() throws Exception {
byte [] TABLE = Bytes.toBytes("testTimestampsFilter");
byte [] FAMILY = Bytes.toBytes("event_log");
@@ -166,7 +166,7 @@ public class TestTimestampsFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiColumns() throws Exception {
byte [] TABLE = Bytes.toBytes("testTimestampsFilterMultiColumns");
byte [] FAMILY = Bytes.toBytes("event_log");
@@ -216,7 +216,7 @@ public class TestTimestampsFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testWithVersionDeletes() throws Exception {
// first test from memstore (without flushing).
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
index 73e493b..a929c8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
@@ -46,7 +46,7 @@ public class TestUpdateConfiguration {
TEST_UTIL.startMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testOnlineConfigChange() throws IOException {
LOG.debug("Starting the test");
Admin admin = TEST_UTIL.getHBaseAdmin();
@@ -54,7 +54,7 @@ public class TestUpdateConfiguration {
admin.updateConfiguration(server);
}
- @Test
+ @Test (timeout=180000)
public void testMasterOnlineConfigChange() throws IOException {
LOG.debug("Starting the test");
Path cnfPath = FileSystems.getDefault().getPath("target/test-classes/hbase-site.xml");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
index 4db646e..4ae26a8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
@@ -73,7 +73,7 @@ public class TestReplicationAdmin {
* all interactions with ZK work
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testAddRemovePeer() throws Exception {
// Add a valid peer
admin.addPeer(ID_ONE, KEY_ONE);
@@ -110,7 +110,7 @@ public class TestReplicationAdmin {
* basic checks that when we add a peer that it is enabled, and that we can disable
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testEnableDisable() throws Exception {
admin.addPeer(ID_ONE, KEY_ONE);
assertEquals(1, admin.getPeersCount());
@@ -126,7 +126,7 @@ public class TestReplicationAdmin {
admin.removePeer(ID_ONE);
}
- @Test
+ @Test (timeout=180000)
public void testGetTableCfsStr() {
// opposite of TestPerTableCFReplication#testParseTableCFsFromConfig()
@@ -157,7 +157,7 @@ public class TestReplicationAdmin {
assertEquals("tab1;tab2:cf1;tab3:cf1,cf3", ReplicationAdmin.getTableCfsStr(tabCFsMap));
}
- @Test
+ @Test (timeout=180000)
public void testAppendPeerTableCFs() throws Exception {
// Add a valid peer
admin.addPeer(ID_ONE, KEY_ONE);
@@ -175,7 +175,7 @@ public class TestReplicationAdmin {
admin.removePeer(ID_ONE);
}
- @Test
+ @Test (timeout=180000)
public void testRemovePeerTableCFs() throws Exception {
// Add a valid peer
admin.addPeer(ID_ONE, KEY_ONE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
index b51de80..03c9957 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
@@ -45,7 +45,7 @@ import com.google.common.io.CountingOutputStream;
public class TestCellMessageCodec {
public static final Log LOG = LogFactory.getLog(TestCellMessageCodec.class);
- @Test
+ @Test (timeout=60000)
public void testEmptyWorks() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CountingOutputStream cos = new CountingOutputStream(baos);
@@ -64,7 +64,7 @@ public class TestCellMessageCodec {
assertEquals(0, cis.getCount());
}
- @Test
+ @Test (timeout=60000)
public void testOne() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CountingOutputStream cos = new CountingOutputStream(baos);
@@ -86,7 +86,7 @@ public class TestCellMessageCodec {
assertEquals(offset, cis.getCount());
}
- @Test
+ @Test (timeout=60000)
public void testThree() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CountingOutputStream cos = new CountingOutputStream(baos);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
index fe56344..f1fd720 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
@@ -68,7 +68,7 @@ public class TestConfigurationManager {
* Test if observers get notified by the ConfigurationManager
* when the Configuration is reloaded.
*/
- @Test
+ @Test (timeout=60000)
public void testCheckIfObserversNotified() {
Configuration conf = new Configuration();
ConfigurationManager cm = new ConfigurationManager();
@@ -104,7 +104,7 @@ public class TestConfigurationManager {
/**
* Test if out-of-scope observers are deregistered on GC.
*/
- @Test
+ @Test (timeout=60000)
public void testDeregisterOnOutOfScope() {
Configuration conf = new Configuration();
ConfigurationManager cm = new ConfigurationManager();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
index 96da03a..70d6a9f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
@@ -70,7 +70,7 @@ public class TestConstraint {
* @throws Exception
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=180000)
public void testConstraintPasses() throws Exception {
// create the table
// it would be nice if this was also a method on the util
@@ -139,7 +139,7 @@ public class TestConstraint {
* @throws Throwable
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=180000)
public void testDisableConstraint() throws Throwable {
// create the table
HTableDescriptor desc = new HTableDescriptor(tableName);
@@ -174,7 +174,7 @@ public class TestConstraint {
* @throws Throwable
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=180000)
public void testDisableConstraints() throws Throwable {
// create the table
HTableDescriptor desc = new HTableDescriptor(tableName);
@@ -206,7 +206,7 @@ public class TestConstraint {
* Check to make sure a constraint is unloaded when it fails
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testIsUnloaded() throws Exception {
// create the table
HTableDescriptor desc = new HTableDescriptor(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java
index afd55bb..0a84500 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
public class TestConstraints {
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testSimpleReadWrite() throws Throwable {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(desc, WorksConstraint.class);
@@ -68,7 +68,7 @@ public class TestConstraints {
}
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testReadWriteWithConf() throws Throwable {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(
@@ -101,7 +101,7 @@ public class TestConstraints {
* @throws Exception
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testEnableDisableRemove() throws Exception {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
// check general enabling/disabling of constraints
@@ -137,7 +137,7 @@ public class TestConstraints {
* @throws Exception
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testUpdateConstraint() throws Exception {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.add(desc, CheckConfigurationConstraint.class,
@@ -163,14 +163,14 @@ public class TestConstraints {
* @throws Throwable
* on failure.
*/
- @Test
+ @Test (timeout=60000)
public void testRemoveUnsetConstraint() throws Throwable {
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("table"));
Constraints.remove(desc);
Constraints.remove(desc, AlsoWorks.class);
}
- @Test
+ @Test (timeout=60000)
public void testConfigurationPreserved() throws Throwable {
Configuration conf = new Configuration();
conf.setBoolean("_ENABLED", false);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index a5ee4c1..e596d83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -108,7 +108,7 @@ public class TestBatchCoprocessorEndpoint {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testAggregationNullResponse() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
ColumnAggregationWithNullResponseProtos.SumRequest.Builder builder =
@@ -161,7 +161,7 @@ public class TestBatchCoprocessorEndpoint {
builder.build(), start, end, ColumnAggregationProtos.SumResponse.getDefaultInstance());
}
- @Test
+ @Test (timeout=180000)
public void testAggregationWithReturnValue() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[0],
@@ -197,7 +197,7 @@ public class TestBatchCoprocessorEndpoint {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testAggregation() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER,
@@ -230,7 +230,7 @@ public class TestBatchCoprocessorEndpoint {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testAggregationWithErrors() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
final Map results =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
index ac75660..5152aa6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
@@ -684,7 +684,7 @@ public class TestBigDecimalColumnInterpreter {
// null column family, and max will be set to 0
}
- @Test
+ @Test (timeout=180000)
public void testStdWithInvalidRange() {
AggregationClient aClient = new AggregationClient(conf);
Scan scan = new Scan();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 140c3b9..723c814 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -111,7 +111,7 @@ public class TestClassLoading {
TEST_UTIL.getDataTestDir().toString(), className, code);
}
- @Test
+ @Test (timeout=180000)
// HBASE-3516: Test CP Class loading from HDFS
public void testClassLoadingFromHDFS() throws Exception {
FileSystem fs = cluster.getFileSystem();
@@ -218,7 +218,7 @@ public class TestClassLoading {
return new Path(file.toURI()).toString();
}
- @Test
+ @Test (timeout=180000)
// HBASE-3516: Test CP Class loading from local file system
public void testClassLoadingFromLocalFS() throws Exception {
File jarFile = buildCoprocessorJar(cpName3);
@@ -244,7 +244,7 @@ public class TestClassLoading {
assertTrue("Class " + cpName3 + " was missing on a region", found);
}
- @Test
+ @Test (timeout=180000)
// HBASE-6308: Test CP classloader is the CoprocessorClassLoader
public void testPrivateClassLoader() throws Exception {
File jarFile = buildCoprocessorJar(cpName4);
@@ -275,7 +275,7 @@ public class TestClassLoading {
assertTrue("Class " + cpName4 + " was missing on a region", found);
}
- @Test
+ @Test (timeout=180000)
// HBase-3810: Registering a Coprocessor at HTableDescriptor should be
// less strict
public void testHBase3810() throws Exception {
@@ -370,12 +370,12 @@ public class TestClassLoading {
assertFalse("Configuration key 'k4' wasn't configured", found6_k4);
}
- @Test
+ @Test (timeout=180000)
public void testClassLoadingFromLibDirInJar() throws Exception {
loadingClassFromLibDirInJar("/lib/");
}
- @Test
+ @Test (timeout=180000)
public void testClassLoadingFromRelativeLibDirInJar() throws Exception {
loadingClassFromLibDirInJar("lib/");
}
@@ -447,7 +447,7 @@ public class TestClassLoading {
assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
}
- @Test
+ @Test (timeout=180000)
public void testRegionServerCoprocessorsReported() throws Exception {
// This was a test for HBASE-4070.
// We are removing coprocessors from region load in HBASE-5258.
@@ -518,7 +518,7 @@ public class TestClassLoading {
assertTrue(success);
}
- @Test
+ @Test (timeout=180000)
public void testMasterCoprocessorsReported() {
// HBASE 4070: Improve region server metrics to report loaded coprocessors
// to master: verify that the master is reporting the correct set of
@@ -531,7 +531,7 @@ public class TestClassLoading {
assertEquals(loadedMasterCoprocessorsVerify, loadedMasterCoprocessors);
}
- @Test
+ @Test (timeout=180000)
public void testFindCoprocessors() {
// HBASE 12277:
CoprocessorHost masterCpHost =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
index fb2f20c..b9a217d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
@@ -94,7 +94,7 @@ public class TestCoprocessorConfiguration {
public void stop(CoprocessorEnvironment env) throws IOException { }
}
- @Test
+ @Test (timeout=60000)
public void testRegionCoprocessorHostDefaults() throws Exception {
Configuration conf = new Configuration(CONF);
HRegion region = mock(HRegion.class);
@@ -113,7 +113,7 @@ public class TestCoprocessorConfiguration {
CoprocessorHost.DEFAULT_USER_COPROCESSORS_ENABLED);
}
- @Test
+ @Test (timeout=60000)
public void testRegionServerCoprocessorHostDefaults() throws Exception {
Configuration conf = new Configuration(CONF);
RegionServerServices rsServices = mock(RegionServerServices.class);
@@ -124,7 +124,7 @@ public class TestCoprocessorConfiguration {
CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED);
}
- @Test
+ @Test (timeout=60000)
public void testMasterCoprocessorHostDefaults() throws Exception {
Configuration conf = new Configuration(CONF);
MasterServices masterServices = mock(MasterServices.class);
@@ -135,7 +135,7 @@ public class TestCoprocessorConfiguration {
CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED);
}
- @Test
+ @Test (timeout=60000)
public void testRegionCoprocessorHostAllDisabled() throws Exception {
Configuration conf = new Configuration(CONF);
conf.setBoolean(CoprocessorHost.COPROCESSORS_ENABLED_CONF_KEY, false);
@@ -152,7 +152,7 @@ public class TestCoprocessorConfiguration {
tableCoprocessorLoaded.get());
}
- @Test
+ @Test (timeout=60000)
public void testRegionCoprocessorHostTableLoadingDisabled() throws Exception {
Configuration conf = new Configuration(CONF);
conf.setBoolean(CoprocessorHost.COPROCESSORS_ENABLED_CONF_KEY, true); // if defaults change
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index a3e0c91..4e165a0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -136,7 +136,7 @@ public class TestCoprocessorEndpoint {
});
}
- @Test
+ @Test (timeout=180000)
public void testAggregation() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER,
@@ -170,7 +170,7 @@ public class TestCoprocessorEndpoint {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorService() throws Throwable {
HTable table = (HTable) util.getConnection().getTable(TEST_TABLE);
NavigableMap regions = table.getRegionLocations();
@@ -244,7 +244,7 @@ public class TestCoprocessorEndpoint {
}
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorServiceNullResponse() throws Throwable {
HTable table = (HTable) util.getConnection().getTable(TEST_TABLE);
NavigableMap regions = table.getRegionLocations();
@@ -282,7 +282,7 @@ public class TestCoprocessorEndpoint {
}
}
- @Test
+ @Test (timeout=180000)
public void testMasterCoprocessorService() throws Throwable {
Admin admin = util.getHBaseAdmin();
final TestProtos.EchoRequestProto request =
@@ -292,7 +292,7 @@ public class TestCoprocessorEndpoint {
assertEquals("hello", service.echo(null, request).getMessage());
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorError() throws Exception {
Configuration configuration = new Configuration(util.getConfiguration());
// Make it not retry forever
@@ -313,7 +313,7 @@ public class TestCoprocessorEndpoint {
}
}
- @Test
+ @Test (timeout=180000)
public void testMasterCoprocessorError() throws Throwable {
Admin admin = util.getHBaseAdmin();
TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface service =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index ce76e8a..63fa6d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -274,7 +274,7 @@ public class TestCoprocessorInterface {
}
}
- @Test
+ @Test (timeout=60000)
public void testSharedData() throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName());
byte [][] families = { fam1, fam2, fam3 };
@@ -353,7 +353,7 @@ public class TestCoprocessorInterface {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=60000)
public void testCoprocessorInterface() throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName());
byte [][] families = { fam1, fam2, fam3 };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
index 2ef13f7..5a0c101 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java
@@ -104,7 +104,7 @@ public class TestCoprocessorStop {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testStopped() throws Exception {
//shutdown hbase only. then check flag file.
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
index 7695361..5648b5c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
@@ -67,7 +67,7 @@ public class TestCoprocessorTableEndpoint {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorTableEndpoint() throws Throwable {
final TableName tableName = TableName.valueOf("testCoprocessorTableEndpoint");
@@ -79,7 +79,7 @@ public class TestCoprocessorTableEndpoint {
verifyTable(tableName);
}
- @Test
+ @Test (timeout=180000)
public void testDynamicCoprocessorTableEndpoint() throws Throwable {
final TableName tableName = TableName.valueOf("testDynamicCoprocessorTableEndpoint");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
index baea95d..200af10 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
@@ -682,7 +682,7 @@ public class TestDoubleColumnInterpreter {
// null column family, and max will be set to 0
}
- @Test
+ @Test (timeout=180000)
public void testStdWithInvalidRange() {
AggregationClient aClient = new AggregationClient(conf);
Scan scan = new Scan();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
index 317707a..ef123c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
@@ -127,7 +127,7 @@ public class TestHTableWrapper {
}
}
- @Test
+ @Test (timeout=180000)
public void testHTableInterfaceMethods() throws Exception {
Configuration conf = util.getConfiguration();
MasterCoprocessorHost cpHost = util.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 094555e..2e998dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -1179,7 +1179,7 @@ public class TestMasterObserver {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testStarted() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
@@ -1199,7 +1199,7 @@ public class TestMasterObserver {
cp.wasStartMasterCalled());
}
- @Test
+ @Test (timeout=180000)
public void testTableOperations() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
@@ -1365,7 +1365,7 @@ public class TestMasterObserver {
cp.wasDeleteTableHandlerCalled());
}
- @Test
+ @Test (timeout=180000)
public void testSnapshotOperations() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
HMaster master = cluster.getMaster();
@@ -1426,7 +1426,7 @@ public class TestMasterObserver {
}
}
- @Test
+ @Test (timeout=180000)
public void testNamespaceOperations() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
String testNamespace = "observed_ns";
@@ -1513,7 +1513,7 @@ public class TestMasterObserver {
}
}
- @Test
+ @Test (timeout=180000)
public void testRegionTransitionOperations() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
@@ -1615,7 +1615,7 @@ public class TestMasterObserver {
}
}
- @Test
+ @Test (timeout=180000)
public void testTableDescriptorsEnumeration() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
@@ -1633,7 +1633,7 @@ public class TestMasterObserver {
cp.wasGetTableDescriptorsCalled());
}
- @Test
+ @Test (timeout=180000)
public void testTableNamesEnumeration() throws Exception {
MiniHBaseCluster cluster = UTIL.getHBaseCluster();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
index 4c3594b..8286632 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
@@ -135,12 +135,12 @@ public class TestOpenTableInCoprocessor {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorCanCreateConnectionToRemoteTable() throws Throwable {
runCoprocessorConnectionToRemoteTable(SendToOtherTableCoprocessor.class, completed);
}
- @Test
+ @Test (timeout=180000)
public void testCoprocessorCanCreateConnectionToRemoteTableWithCustomPool() throws Throwable {
runCoprocessorConnectionToRemoteTable(CustomThreadPoolCoprocessor.class, completedWithPool);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
index a02758dac..826fb2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
@@ -91,7 +91,7 @@ public class TestRegionObserverBypass {
* do a single put that is bypassed by a RegionObserver
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testSimple() throws Exception {
Table t = util.getConnection().getTable(tableName);
Put p = new Put(row1);
@@ -106,7 +106,7 @@ public class TestRegionObserverBypass {
* Test various multiput operations.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMulti() throws Exception {
//ensure that server time increments every time we do an operation, otherwise
//previous deletes will eclipse successive puts having the same timestamp
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
index 6c7552a..95daad9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -162,7 +162,7 @@ public class TestRegionObserverScannerOpenHook {
return r;
}
- @Test
+ @Test (timeout=180000)
public void testRegionObserverScanTimeStacking() throws Exception {
byte[] ROW = Bytes.toBytes("testRow");
byte[] TABLE = Bytes.toBytes(getClass().getName());
@@ -187,7 +187,7 @@ public class TestRegionObserverScannerOpenHook {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=180000)
public void testRegionObserverFlushTimeStacking() throws Exception {
byte[] ROW = Bytes.toBytes("testRow");
byte[] TABLE = Bytes.toBytes(getClass().getName());
@@ -245,7 +245,7 @@ public class TestRegionObserverScannerOpenHook {
* the usual compaction mechanism on the region, rather than going through the backdoor to the
* region
*/
- @Test
+ @Test (timeout=180000)
public void testRegionObserverCompactionTimeStacking() throws Exception {
// setup a mini cluster so we can do a real compaction on a region
Configuration conf = UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
index 7ae6787..d72408a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
@@ -65,7 +65,7 @@ public class TestRegionServerCoprocessorEndpoint {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testEndpoint() throws Exception {
final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
final ServerRpcController controller = new ServerRpcController();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerObserver.java
index 2e6eabc..e7062b4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerObserver.java
@@ -60,7 +60,7 @@ public class TestRegionServerObserver {
* Test verifies the hooks in regions merge.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testCoprocessorHooksInRegionsMerge() throws Exception {
final int NUM_MASTERS = 1;
final int NUM_RS = 1;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 2136c3c..879212c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -151,7 +151,7 @@ public class TestRowProcessorEndpoint {
row2Size = put.size();
}
- @Test
+ @Test (timeout=180000)
public void testDoubleScan() throws Throwable {
prepareTestData();
@@ -173,7 +173,7 @@ public class TestRowProcessorEndpoint {
assertEquals(expected, result);
}
- @Test
+ @Test (timeout=180000)
public void testReadModifyWrite() throws Throwable {
prepareTestData();
failures.set(0);
@@ -234,7 +234,7 @@ public class TestRowProcessorEndpoint {
doneSignal.await();
}
- @Test
+ @Test (timeout=180000)
public void testMultipleRows() throws Throwable {
prepareTestData();
failures.set(0);
@@ -270,7 +270,7 @@ public class TestRowProcessorEndpoint {
service.process(null, request);
}
- @Test
+ @Test (timeout=180000)
public void testTimeout() throws Throwable {
prepareTestData();
CoprocessorRpcChannel channel = table.coprocessorService(ROW);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index cdcdeed..91ac6b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -161,7 +161,7 @@ public class TestWALObserver {
* WALEdit written to WAL, and ignore, modify, and add KeyValue's for the
* WALEdit.
*/
- @Test
+ @Test (timeout=180000)
public void testWALObserverWriteToWAL() throws Exception {
final WAL log = wals.getWAL(UNSPECIFIED_REGION);
verifyWritesSeen(log, getCoprocessor(log, SampleRegionWALObserver.class), false);
@@ -172,7 +172,7 @@ public class TestWALObserver {
* WALEdit written to WAL, and ignore, modify, and add KeyValue's for the
* WALEdit.
*/
- @Test
+ @Test (timeout=180000)
public void testLegacyWALObserverWriteToWAL() throws Exception {
final WAL log = wals.getWAL(UNSPECIFIED_REGION);
verifyWritesSeen(log, getCoprocessor(log, SampleRegionWALObserver.Legacy.class), true);
@@ -266,7 +266,7 @@ public class TestWALObserver {
assertEquals(seesLegacy, cp.isPostWALWriteDeprecatedCalled());
}
- @Test
+ @Test (timeout=180000)
public void testNonLegacyWALKeysDoNotExplode() throws Exception {
TableName tableName = TableName.valueOf(TEST_TABLE);
final HTableDescriptor htd = createBasic3FamilyHTD(Bytes
@@ -344,7 +344,7 @@ public class TestWALObserver {
/**
* Coprocessors shouldn't get notice of empty waledits.
*/
- @Test
+ @Test (timeout=180000)
public void testEmptyWALEditAreNotSeen() throws Exception {
final HRegionInfo hri = createBasic3FamilyHRegionInfo(Bytes.toString(TEST_TABLE));
final HTableDescriptor htd = createBasic3FamilyHTD(Bytes.toString(TEST_TABLE));
@@ -374,7 +374,7 @@ public class TestWALObserver {
/**
* Test WAL replay behavior with WALObserver.
*/
- @Test
+ @Test (timeout=180000)
public void testWALCoprocessorReplay() throws Exception {
// WAL replay is handled at HRegion::replayRecoveredEdits(), which is
// ultimately called by HRegion::initialize()
@@ -446,7 +446,7 @@ public class TestWALObserver {
* TestHLog, but the purpose of that one is to see whether the loaded CP will
* impact existing WAL tests or not.
*/
- @Test
+ @Test (timeout=180000)
public void testWALObserverLoaded() throws Exception {
WAL log = wals.getWAL(UNSPECIFIED_REGION);
assertNotNull(getCoprocessor(log, SampleRegionWALObserver.class));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
index 229b170..a2848d7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
@@ -46,7 +46,7 @@ public class TestForeignExceptionDispatcher {
* Tests that a dispatcher only dispatches only the first exception, and does not propagate
* subsequent exceptions.
*/
- @Test
+ @Test (timeout=60000)
public void testErrorPropagation() {
ForeignExceptionListener listener1 = Mockito.mock(ForeignExceptionListener.class);
ForeignExceptionListener listener2 = Mockito.mock(ForeignExceptionListener.class);
@@ -78,7 +78,7 @@ public class TestForeignExceptionDispatcher {
Mockito.verify(listener2, Mockito.never()).receive(EXTEXN2);
}
- @Test
+ @Test (timeout=60000)
public void testSingleDispatcherWithTimer() {
ForeignExceptionListener listener1 = Mockito.mock(ForeignExceptionListener.class);
ForeignExceptionListener listener2 = Mockito.mock(ForeignExceptionListener.class);
@@ -103,7 +103,7 @@ public class TestForeignExceptionDispatcher {
/**
* Test that the dispatcher can receive an error via the timer mechanism.
*/
- @Test
+ @Test (timeout=60000)
public void testAttemptTimer() {
ForeignExceptionListener listener1 = Mockito.mock(ForeignExceptionListener.class);
ForeignExceptionListener listener2 = Mockito.mock(ForeignExceptionListener.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
index f893555..4358d2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
@@ -40,7 +40,7 @@ public class TestForeignExceptionSerialization {
* Verify that we get back similar stack trace information before an after serialization.
* @throws InvalidProtocolBufferException
*/
- @Test
+ @Test (timeout=60000)
public void testSimpleException() throws InvalidProtocolBufferException {
String data = "some bytes";
ForeignException in = new ForeignException("SRC", new IllegalArgumentException(data));
@@ -63,7 +63,7 @@ public class TestForeignExceptionSerialization {
* serialization and deserialization
* @throws InvalidProtocolBufferException
*/
- @Test
+ @Test (timeout=60000)
public void testRemoteFromLocal() throws InvalidProtocolBufferException {
String errorMsg = "some message";
Exception generic = new Exception(errorMsg);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
index 49f6164..a8392b3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
@@ -51,7 +51,7 @@ public class TestTimeoutExceptionInjector {
/**
* Test that a manually triggered exception with data fires with the data in receiveError.
*/
- @Test
+ @Test (timeout=60000)
public void testTimerPassesOnErrorInfo() {
final long time = 1000000;
ForeignExceptionListener listener = Mockito.mock(ForeignExceptionListener.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
index 0561ac4..dabdc3f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
@@ -43,7 +43,7 @@ import static org.mockito.Mockito.*;
public class TestExecutorService {
private static final Log LOG = LogFactory.getLog(TestExecutorService.class);
- @Test
+ @Test (timeout=60000)
public void testExecutorService() throws Exception {
int maxThreads = 5;
int maxTries = 10;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
index 21414f0..6f6ade1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
@@ -42,7 +42,7 @@ public class TestBitComparator {
private final int Equal = 0;
private final int NotEqual = 1;
- @Test
+ @Test (timeout=60000)
public void testANDOperation() {
testOperation(zeros, ones, BitComparator.BitwiseOp.AND, NotEqual);
testOperation(data1, ones, BitComparator.BitwiseOp.AND, Equal);
@@ -52,7 +52,7 @@ public class TestBitComparator {
testOperation(ones, data3, BitComparator.BitwiseOp.AND, NotEqual);
}
- @Test
+ @Test (timeout=60000)
public void testOROperation() {
testOperation(ones, zeros, BitComparator.BitwiseOp.OR, Equal);
testOperation(zeros, zeros, BitComparator.BitwiseOp.OR, NotEqual);
@@ -61,7 +61,7 @@ public class TestBitComparator {
testOperation(ones, data3, BitComparator.BitwiseOp.OR, NotEqual);
}
- @Test
+ @Test (timeout=60000)
public void testXOROperation() {
testOperation(ones, zeros, BitComparator.BitwiseOp.XOR, Equal);
testOperation(zeros, zeros, BitComparator.BitwiseOp.XOR, NotEqual);
@@ -75,20 +75,20 @@ public class TestBitComparator {
assertEquals(comparator.compareTo(data), expected);
}
- @Test
+ @Test (timeout=60000)
public void testANDOperationWithOffset() {
testOperationWithOffset(data1_2, ones, BitComparator.BitwiseOp.AND, Equal);
testOperationWithOffset(data1_2, data0, BitComparator.BitwiseOp.AND, NotEqual);
testOperationWithOffset(data2_2, data1, BitComparator.BitwiseOp.AND, NotEqual);
}
- @Test
+ @Test (timeout=60000)
public void testOROperationWithOffset() {
testOperationWithOffset(data1_2, zeros, BitComparator.BitwiseOp.OR, Equal);
testOperationWithOffset(data2_2, data1, BitComparator.BitwiseOp.OR, Equal);
}
- @Test
+ @Test (timeout=60000)
public void testXOROperationWithOffset() {
testOperationWithOffset(data2_2, data1, BitComparator.BitwiseOp.XOR, Equal);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
index 4d0329b..9844993 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
@@ -84,7 +84,7 @@ public class TestColumnPaginationFilter
* Tests serialization
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
Filter newFilter = serializationTest(columnPaginationFilter);
basicFilterTests((ColumnPaginationFilter)newFilter);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
index 2e51c82..7f4c2ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
@@ -45,7 +45,7 @@ public class TestColumnPrefixFilter {
private final static HBaseTestingUtility TEST_UTIL = new
HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testColumnPrefixFilter() throws IOException {
String family = "Family";
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter"));
@@ -107,7 +107,7 @@ public class TestColumnPrefixFilter {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=60000)
public void testColumnPrefixFilterWithFilterList() throws IOException {
String family = "Family";
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestColumnPrefixFilter"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
index 1c81adf..baf45ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
@@ -156,7 +156,7 @@ public class TestColumnRangeFilter {
// Nothing to do.
}
- @Test
+ @Test (timeout=180000)
public void TestColumnRangeFilterClient() throws Exception {
String family = "Family";
String table = "TestColumnRangeFilterClient";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
index 223416f..ae35667 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
@@ -34,14 +34,14 @@ import org.junit.experimental.categories.Category;
@Category({FilterTests.class, SmallTests.class})
public class TestComparatorSerialization {
- @Test
+ @Test (timeout=60000)
public void testBinaryComparator() throws Exception {
BinaryComparator binaryComparator = new BinaryComparator(Bytes.toBytes("binaryComparator"));
assertTrue(binaryComparator.areSerializedFieldsEqual(
ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryComparator))));
}
- @Test
+ @Test (timeout=60000)
public void testBinaryPrefixComparator() throws Exception {
BinaryPrefixComparator binaryPrefixComparator =
new BinaryPrefixComparator(Bytes.toBytes("binaryPrefixComparator"));
@@ -49,7 +49,7 @@ public class TestComparatorSerialization {
ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryPrefixComparator))));
}
- @Test
+ @Test (timeout=60000)
public void testBitComparator() throws Exception {
BitComparator bitComparator =
new BitComparator(Bytes.toBytes("bitComparator"), BitComparator.BitwiseOp.XOR);
@@ -57,14 +57,14 @@ public class TestComparatorSerialization {
ProtobufUtil.toComparator(ProtobufUtil.toComparator(bitComparator))));
}
- @Test
+ @Test (timeout=60000)
public void testNullComparator() throws Exception {
NullComparator nullComparator = new NullComparator();
assertTrue(nullComparator.areSerializedFieldsEqual(
ProtobufUtil.toComparator(ProtobufUtil.toComparator(nullComparator))));
}
- @Test
+ @Test (timeout=60000)
public void testRegexStringComparator() throws Exception {
// test without specifying flags
RegexStringComparator regexStringComparator = new RegexStringComparator(".+-2");
@@ -79,7 +79,7 @@ public class TestComparatorSerialization {
}
}
- @Test
+ @Test (timeout=60000)
public void testSubstringComparator() throws Exception {
SubstringComparator substringComparator = new SubstringComparator("substr");
assertTrue(substringComparator.areSerializedFieldsEqual(
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
index 40a4c43..84c310f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
@@ -166,7 +166,7 @@ public class TestDependentColumnFilter {
/**
* Test scans using a DependentColumnFilter
*/
- @Test
+ @Test (timeout=60000)
public void testScans() throws Exception {
Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
@@ -222,7 +222,7 @@ public class TestDependentColumnFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testFilterDropping() throws Exception {
Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
List accepted = new ArrayList();
@@ -253,7 +253,7 @@ public class TestDependentColumnFilter {
/**
* Test for HBASE-8794. Avoid NullPointerException in DependentColumnFilter.toString().
*/
- @Test
+ @Test (timeout=60000)
public void testToStringWithNullComparator() {
// Test constructor that implicitly sets a null comparator
Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER);
@@ -268,7 +268,7 @@ public class TestDependentColumnFilter {
filter.toString().contains("null"));
}
- @Test
+ @Test (timeout=60000)
public void testToStringWithNonNullComparator() {
Filter filter =
new DependentColumnFilter(FAMILIES[0], QUALIFIER, true, CompareOp.EQUAL,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 61321dd..a236b8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -220,7 +220,7 @@ public class TestFilter {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=60000)
public void testRegionScannerReseek() throws Exception {
// create new rows and column family to show how reseek works..
for (byte[] ROW : ROWS_THREE) {
@@ -287,7 +287,7 @@ public class TestFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testNoFilter() throws Exception {
// No filter
long expectedRows = this.numRows;
@@ -303,7 +303,7 @@ public class TestFilter {
verifyScan(s, expectedRows, expectedKeys/2);
}
- @Test
+ @Test (timeout=60000)
public void testPrefixFilter() throws Exception {
// Grab rows from group one (half of total)
long expectedRows = this.numRows / 2;
@@ -323,7 +323,7 @@ public class TestFilter {
verifyScan(s, expectedRows, expectedKeys);
}
- @Test
+ @Test (timeout=60000)
public void testPageFilter() throws Exception {
// KVs in first 6 rows
@@ -553,7 +553,7 @@ public class TestFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testWhileMatchFilterWithFilterRow() throws Exception {
final int pageSize = 4;
@@ -611,7 +611,7 @@ public class TestFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void tes94FilterRowCompatibility() throws Exception {
Scan s = new Scan();
OldTestFilter filter = new OldTestFilter();
@@ -632,7 +632,7 @@ public class TestFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testWhileMatchFilterWithFilterRowKey() throws Exception {
Scan s = new Scan();
String prefix = "testRowOne";
@@ -660,7 +660,7 @@ public class TestFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testWhileMatchFilterWithFilterKeyValue() throws Exception {
Scan s = new Scan();
WhileMatchFilter filter = new WhileMatchFilter(
@@ -679,7 +679,7 @@ public class TestFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testInclusiveStopFilter() throws IOException {
// Grab rows from group one
@@ -714,7 +714,7 @@ public class TestFilter {
}
- @Test
+ @Test (timeout=60000)
public void testQualifierFilter() throws IOException {
// Match two keys (one from each family) in half the rows
@@ -872,7 +872,7 @@ public class TestFilter {
}
- @Test
+ @Test (timeout=60000)
public void testFamilyFilter() throws IOException {
// Match family, only half of columns returned.
@@ -1007,7 +1007,7 @@ public class TestFilter {
}
- @Test
+ @Test (timeout=60000)
public void testRowFilter() throws IOException {
// Match a single row, all keys
@@ -1154,7 +1154,7 @@ public class TestFilter {
}
- @Test
+ @Test (timeout=60000)
public void testValueFilter() throws IOException {
// Match group one rows
@@ -1278,7 +1278,7 @@ public class TestFilter {
verifyScanFull(s, kvs);
}
- @Test
+ @Test (timeout=60000)
public void testSkipFilter() throws IOException {
// Test for qualifier regex: "testQualifierOne-2"
@@ -1316,7 +1316,7 @@ public class TestFilter {
// TODO: This is important... need many more tests for ordering, etc
// There are limited tests elsewhere but we need HRegion level ones here
- @Test
+ @Test (timeout=60000)
public void testFilterList() throws IOException {
// Test getting a single row, single key using Row, Qualifier, and Value
@@ -1349,7 +1349,7 @@ public class TestFilter {
}
- @Test
+ @Test (timeout=60000)
public void testFirstKeyOnlyFilter() throws IOException {
Scan s = new Scan();
s.setFilter(new FirstKeyOnlyFilter());
@@ -1365,7 +1365,7 @@ public class TestFilter {
verifyScanFull(s, kvs);
}
- @Test
+ @Test (timeout=60000)
public void testFilterListWithSingleColumnValueFilter() throws IOException {
// Test for HBASE-3191
@@ -1443,7 +1443,7 @@ public class TestFilter {
}
// HBASE-9747
- @Test
+ @Test (timeout=60000)
public void testFilterListWithPrefixFilter() throws IOException {
byte[] family = Bytes.toBytes("f1");
byte[] qualifier = Bytes.toBytes("q1");
@@ -1490,7 +1490,7 @@ public class TestFilter {
wal.close();
}
- @Test
+ @Test (timeout=60000)
public void testSingleColumnValueFilter() throws IOException {
// From HBASE-1821
@@ -1812,7 +1812,7 @@ public class TestFilter {
this.verifyScanFull(s, expectedKVs3);
}
- @Test
+ @Test (timeout=60000)
public void testColumnPaginationFilter() throws Exception {
// Test that the filter skips multiple column versions.
Put p = new Put(ROWS_ONE[0]);
@@ -1910,7 +1910,7 @@ public class TestFilter {
this.verifyScanFull(s, expectedKVs4);
}
- @Test
+ @Test (timeout=60000)
public void testKeyOnlyFilter() throws Exception {
// KVs in first 6 rows
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 759435b..8e82b7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -57,7 +57,7 @@ public class TestFilterList {
static byte[] BAD_BYTES = Bytes.toBytes("def");
- @Test
+ @Test (timeout=60000)
public void testAddFilter() throws Exception {
Filter filter1 = new FirstKeyOnlyFilter();
Filter filter2 = new FirstKeyOnlyFilter();
@@ -81,7 +81,7 @@ public class TestFilterList {
* Test "must pass one"
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testMPONE() throws Exception {
mpOneTest(getFilterMPONE());
}
@@ -147,7 +147,7 @@ public class TestFilterList {
* Test "must pass all"
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testMPALL() throws Exception {
mpAllTest(getMPALLFilter());
}
@@ -195,7 +195,7 @@ public class TestFilterList {
* Test list ordering
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testOrdering() throws Exception {
orderingTest(getOrderingFilter());
}
@@ -328,7 +328,7 @@ public class TestFilterList {
* Test serialization
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
List filters = new ArrayList();
filters.add(new PageFilter(MAX_PAGES));
@@ -404,7 +404,7 @@ public class TestFilterList {
/**
* Test pass-thru of hints.
*/
- @Test
+ @Test (timeout=60000)
public void testHintPassThru() throws Exception {
final KeyValue minKeyValue = new KeyValue(Bytes.toBytes(0L), null, null);
@@ -516,7 +516,7 @@ public class TestFilterList {
* transform() only applies after a filterKeyValue() whose return-code includes the KeyValue.
* Lazy evaluation of AND
*/
- @Test
+ @Test (timeout=60000)
public void testTransformMPO() throws Exception {
// Apply the following filter:
// (family=fam AND qualifier=qual1 AND KeyOnlyFilter)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
index 0a8b4bf..81f1cbf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
@@ -39,21 +39,21 @@ import org.junit.experimental.categories.Category;
@Category({FilterTests.class, SmallTests.class})
public class TestFilterSerialization {
- @Test
+ @Test (timeout=60000)
public void testColumnCountGetFilter() throws Exception {
ColumnCountGetFilter columnCountGetFilter = new ColumnCountGetFilter(1);
assertTrue(columnCountGetFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnCountGetFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testColumnPaginationFilter() throws Exception {
ColumnPaginationFilter columnPaginationFilter = new ColumnPaginationFilter(1,7);
assertTrue(columnPaginationFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPaginationFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testColumnPrefixFilter() throws Exception {
// empty string
ColumnPrefixFilter columnPrefixFilter = new ColumnPrefixFilter(Bytes.toBytes(""));
@@ -66,7 +66,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPrefixFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testColumnRangeFilter() throws Exception {
// null columns
ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(null, true, null, false);
@@ -79,7 +79,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnRangeFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testDependentColumnFilter() throws Exception {
// null column qualifier/family
DependentColumnFilter dependentColumnFilter = new DependentColumnFilter(null, null);
@@ -94,7 +94,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(dependentColumnFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testFamilyFilter() throws Exception {
FamilyFilter familyFilter = new FamilyFilter(CompareFilter.CompareOp.EQUAL,
new BinaryPrefixComparator(Bytes.toBytes("testValueOne")));
@@ -102,7 +102,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(familyFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testFilterList() throws Exception {
// empty filter list
FilterList filterList = new FilterList(new LinkedList());
@@ -118,7 +118,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(filterList))));
}
- @Test
+ @Test (timeout=60000)
public void testFilterWrapper() throws Exception {
FilterWrapper filterWrapper =
new FilterWrapper(
@@ -127,7 +127,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(filterWrapper))));
}
- @Test
+ @Test (timeout=60000)
public void testFirstKeyValueMatchingQualifiersFilter() throws Exception {
// empty qualifiers set
TreeSet set = new TreeSet(Bytes.BYTES_COMPARATOR);
@@ -145,14 +145,14 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(firstKeyValueMatchingQualifiersFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testFirstKeyOnlyFilter() throws Exception {
FirstKeyOnlyFilter firstKeyOnlyFilter = new FirstKeyOnlyFilter();
assertTrue(firstKeyOnlyFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(firstKeyOnlyFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testFuzzyRowFilter() throws Exception {
LinkedList> fuzzyList = new LinkedList>();
fuzzyList.add(new Pair(Bytes.toBytes("999"),new byte[] {0, 0, 1}));
@@ -162,7 +162,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(fuzzyRowFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testInclusiveStopFilter() throws Exception {
// InclusveStopFilter with null stopRowKey
InclusiveStopFilter inclusiveStopFilter = new InclusiveStopFilter(null);
@@ -175,7 +175,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(inclusiveStopFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testKeyOnlyFilter() throws Exception {
// KeyOnlyFilter with lenAsVal
KeyOnlyFilter keyOnlyFilter = new KeyOnlyFilter(true);
@@ -188,7 +188,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(keyOnlyFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testMultipleColumnPrefixFilter() throws Exception {
// empty array
byte [][] prefixes = null;
@@ -206,14 +206,14 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(multipleColumnPrefixFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testPageFilter() throws Exception {
PageFilter pageFilter = new PageFilter(6);
assertTrue(pageFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(pageFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testPrefixFilter() throws Exception {
// null prefix
PrefixFilter prefixFilter = new PrefixFilter(null);
@@ -226,7 +226,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(prefixFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testQualifierFilter() throws Exception {
QualifierFilter qualifierFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL,
new NullComparator());
@@ -234,14 +234,14 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(qualifierFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testRandomRowFilter() throws Exception {
RandomRowFilter randomRowFilter = new RandomRowFilter((float)0.1);
assertTrue(randomRowFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(randomRowFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testRowFilter() throws Exception {
RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL,
new SubstringComparator("testRowFilter"));
@@ -249,7 +249,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(rowFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testSingleColumnValueExcludeFilter() throws Exception {
// null family/column SingleColumnValueExcludeFilter
SingleColumnValueExcludeFilter singleColumnValueExcludeFilter =
@@ -266,7 +266,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueExcludeFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testSingleColumnValueFilter() throws Exception {
// null family/column SingleColumnValueFilter
SingleColumnValueFilter singleColumnValueFilter =
@@ -283,14 +283,14 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testSkipFilter() throws Exception {
SkipFilter skipFilter = new SkipFilter(new PageFilter(6));
assertTrue(skipFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(skipFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testTimestampsFilter() throws Exception {
// Empty timestamp list
TimestampsFilter timestampsFilter = new TimestampsFilter(new LinkedList());
@@ -306,7 +306,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(timestampsFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testValueFilter() throws Exception {
ValueFilter valueFilter = new ValueFilter(CompareFilter.CompareOp.NO_OP,
new BinaryComparator(Bytes.toBytes("testValueOne")));
@@ -314,7 +314,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(valueFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testWhileMatchFilter() throws Exception {
WhileMatchFilter whileMatchFilter =
new WhileMatchFilter(
@@ -323,7 +323,7 @@ public class TestFilterSerialization {
ProtobufUtil.toFilter(ProtobufUtil.toFilter(whileMatchFilter))));
}
- @Test
+ @Test (timeout=60000)
public void testMultiRowRangeFilter() throws Exception {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(30), true, Bytes.toBytes(40), false));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
index 78a4d1f..ada0abb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
@@ -54,7 +54,7 @@ public class TestFilterWithScanLimits extends FilterTestingCluster {
private static final TableName tableName = TableName.valueOf("scanWithLimit");
private static final String columnFamily = "f1";
- @Test
+ @Test (timeout=180000)
public void testScanWithLimit() {
int kv_number = 0;
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
index 8ce0b76..590577c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
@@ -70,7 +70,7 @@ public class TestFilterWrapper {
private static TableName name = TableName.valueOf("test");
private static Connection connection;
- @Test
+ @Test (timeout=180000)
public void testFilterWrapper() {
int kv_number = 0;
int row_number = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
index 565c7db..d2779e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
@@ -86,7 +86,7 @@ public class TestFuzzyRowAndColumnRangeFilter {
// Nothing to do.
}
- @Test
+ @Test (timeout=180000)
public void Test() throws Exception {
String cf = "f";
String table = "TestFuzzyAndColumnRangeFilterClient";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java
index 3ec1351..ee6a593 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java
@@ -26,7 +26,7 @@ import org.junit.experimental.categories.Category;
@Category({FilterTests.class, SmallTests.class})
public class TestFuzzyRowFilter {
- @Test
+ @Test (timeout=60000)
public void testSatisfiesForward() {
Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS,
FuzzyRowFilter.satisfies(false,
@@ -95,7 +95,7 @@ public class TestFuzzyRowFilter {
new byte[]{1, 0, 0}));
}
- @Test
+ @Test (timeout=60000)
public void testSatisfiesReverse() {
Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NO_NEXT,
FuzzyRowFilter.satisfies(true,
@@ -164,7 +164,7 @@ public class TestFuzzyRowFilter {
new byte[]{1, 0, 0}));
}
- @Test
+ @Test (timeout=60000)
public void testGetNextForFuzzyRuleForward() {
assertNext(false,
new byte[]{0, 1, 2}, // fuzzy row
@@ -275,7 +275,7 @@ public class TestFuzzyRowFilter {
new byte[]{0, 0, 1, 0}));
}
- @Test
+ @Test (timeout=60000)
public void testGetNextForFuzzyRuleReverse() {
assertNext(true,
new byte[]{0, 1, 2}, // fuzzy row
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
index e527ca8..483a2c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
@@ -48,7 +48,7 @@ public class TestInclusiveStopFilter {
* Tests identification of the stop row
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testStopRowIdentification() throws Exception {
stopRowTests(mainFilter);
}
@@ -57,7 +57,7 @@ public class TestInclusiveStopFilter {
* Tests serialization
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
byte[] buffer = mainFilter.toByteArray();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index 5454480d..3974c8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -81,7 +81,7 @@ public class TestInvocationRecordFilter {
this.region.flushcache();
}
- @Test
+ @Test (timeout=60000)
public void testFilterInvocation() throws Exception {
List selectQualifiers = new ArrayList();
List expectedQualifiers = new ArrayList();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
index c2d25de..0bd5e2d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
@@ -68,7 +68,7 @@ public class TestMultiRowRangeFilter {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithEmptyStartRow() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(""), true, Bytes.toBytes(20), false));
@@ -79,7 +79,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithEmptyStopRow() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -91,7 +91,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithEmptyStartRowAndStopRow() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -119,7 +119,7 @@ public class TestMultiRowRangeFilter {
new MultiRowRangeFilter(ranges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithoutOverlap() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -133,7 +133,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithOverlap() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -150,7 +150,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithStartRowInclusive() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -161,7 +161,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithRowExclusive() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
@@ -173,7 +173,7 @@ public class TestMultiRowRangeFilter {
assertRangesEqual(expectedRanges, actualRanges);
}
- @Test
+ @Test (timeout=180000)
public void testMergeAndSortWithRowInclusive() throws IOException {
List ranges = new ArrayList();
ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), true));
@@ -196,7 +196,7 @@ public class TestMultiRowRangeFilter {
}
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithRangeOverlap() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithRangeOverlap");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -224,7 +224,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithoutRangeOverlap");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -251,7 +251,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithEmptyStartRow() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithEmptyStartRow");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -273,7 +273,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithEmptyStopRow");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -294,7 +294,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithInclusive() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithInclusive");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -322,7 +322,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeFilterWithExclusive() throws IOException {
tableName = Bytes.toBytes("testMultiRowRangeFilterWithExclusive");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -348,7 +348,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeWithFilterListAndOperator() throws IOException {
tableName = Bytes.toBytes("TestMultiRowRangeFilterWithFilterListAndOperator");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
@@ -383,7 +383,7 @@ public class TestMultiRowRangeFilter {
ht.close();
}
- @Test
+ @Test (timeout=180000)
public void testMultiRowRangeWithFilterListOrOperator() throws IOException {
tableName = Bytes.toBytes("TestMultiRowRangeFilterWithFilterListOrOperator");
HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
index d2997af..96efc42 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
@@ -45,7 +45,7 @@ public class TestMultipleColumnPrefixFilter {
private final static HBaseTestingUtility TEST_UTIL = new
HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testMultipleColumnPrefixFilter() throws IOException {
String family = "Family";
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter"));
@@ -108,7 +108,7 @@ public class TestMultipleColumnPrefixFilter {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=60000)
public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException {
String family1 = "Family1";
String family2 = "Family2";
@@ -180,7 +180,7 @@ public class TestMultipleColumnPrefixFilter {
HBaseTestingUtility.closeRegionAndWAL(region);
}
- @Test
+ @Test (timeout=60000)
public void testMultipleColumnPrefixFilterWithColumnPrefixFilter() throws IOException {
String family = "Family";
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestMultipleColumnPrefixFilter"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java
index 2f13da1..b3394f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java
@@ -25,7 +25,7 @@ import org.junit.experimental.categories.Category;
@Category({FilterTests.class, SmallTests.class})
public class TestNullComparator {
- @Test
+ @Test (timeout=60000)
public void testNullValue()
{
// given
@@ -41,7 +41,7 @@ public class TestNullComparator {
Assert.assertEquals(0, comp2);
}
- @Test
+ @Test (timeout=60000)
public void testNonNullValue() {
// given
byte[] value = new byte[] { 0, 1, 2, 3, 4, 5 };
@@ -56,7 +56,7 @@ public class TestNullComparator {
Assert.assertEquals(1, comp2);
}
- @Test
+ @Test (timeout=60000)
public void testEmptyValue() {
// given
byte[] value = new byte[] { 0 };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
index 139bf6f..a970540 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
@@ -40,7 +40,7 @@ public class TestPageFilter {
* test page size filter
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testPageSize() throws Exception {
Filter f = new PageFilter(ROW_LIMIT);
pageSizeTests(f);
@@ -50,7 +50,7 @@ public class TestPageFilter {
* Test filter serialization
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
Filter f = new PageFilter(ROW_LIMIT);
// Decompose mainFilter to bytes.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
index 4b2df33..131519b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
@@ -53,7 +53,7 @@ public class TestParseFilter {
// Nothing to do.
}
- @Test
+ @Test (timeout=60000)
public void testKeyOnlyFilter() throws IOException {
String filterString = "KeyOnlyFilter()";
doTestFilter(filterString, KeyOnlyFilter.class);
@@ -68,7 +68,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testFirstKeyOnlyFilter() throws IOException {
String filterString = " FirstKeyOnlyFilter( ) ";
doTestFilter(filterString, FirstKeyOnlyFilter.class);
@@ -83,7 +83,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testPrefixFilter() throws IOException {
String filterString = " PrefixFilter('row' ) ";
PrefixFilter prefixFilter = doTestFilter(filterString, PrefixFilter.class);
@@ -100,7 +100,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testColumnPrefixFilter() throws IOException {
String filterString = " ColumnPrefixFilter('qualifier' ) ";
ColumnPrefixFilter columnPrefixFilter =
@@ -109,7 +109,7 @@ public class TestParseFilter {
assertEquals(new String(columnPrefix), "qualifier");
}
- @Test
+ @Test (timeout=60000)
public void testMultipleColumnPrefixFilter() throws IOException {
String filterString = " MultipleColumnPrefixFilter('qualifier1', 'qualifier2' ) ";
MultipleColumnPrefixFilter multipleColumnPrefixFilter =
@@ -119,7 +119,7 @@ public class TestParseFilter {
assertEquals(new String(prefixes[1]), "qualifier2");
}
- @Test
+ @Test (timeout=60000)
public void testColumnCountGetFilter() throws IOException {
String filterString = " ColumnCountGetFilter(4)";
ColumnCountGetFilter columnCountGetFilter =
@@ -144,7 +144,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testPageFilter() throws IOException {
String filterString = " PageFilter(4)";
PageFilter pageFilter =
@@ -161,7 +161,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testColumnPaginationFilter() throws IOException {
String filterString = "ColumnPaginationFilter(4, 6)";
ColumnPaginationFilter columnPaginationFilter =
@@ -196,7 +196,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testInclusiveStopFilter() throws IOException {
String filterString = "InclusiveStopFilter ('row 3')";
InclusiveStopFilter inclusiveStopFilter =
@@ -206,7 +206,7 @@ public class TestParseFilter {
}
- @Test
+ @Test (timeout=60000)
public void testTimestampsFilter() throws IOException {
String filterString = "TimestampsFilter(9223372036854775806, 6)";
TimestampsFilter timestampsFilter =
@@ -237,7 +237,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testRowFilter() throws IOException {
String filterString = "RowFilter ( =, 'binary:regionse')";
RowFilter rowFilter =
@@ -248,7 +248,7 @@ public class TestParseFilter {
assertEquals("regionse", new String(binaryComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testFamilyFilter() throws IOException {
String filterString = "FamilyFilter(>=, 'binaryprefix:pre')";
FamilyFilter familyFilter =
@@ -260,7 +260,7 @@ public class TestParseFilter {
assertEquals("pre", new String(binaryPrefixComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testQualifierFilter() throws IOException {
String filterString = "QualifierFilter(=, 'regexstring:pre*')";
QualifierFilter qualifierFilter =
@@ -272,7 +272,7 @@ public class TestParseFilter {
assertEquals("pre*", new String(regexStringComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testValueFilter() throws IOException {
String filterString = "ValueFilter(!=, 'substring:pre')";
ValueFilter valueFilter =
@@ -284,7 +284,7 @@ public class TestParseFilter {
assertEquals("pre", new String(substringComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testColumnRangeFilter() throws IOException {
String filterString = "ColumnRangeFilter('abc', true, 'xyz', false)";
ColumnRangeFilter columnRangeFilter =
@@ -295,7 +295,7 @@ public class TestParseFilter {
assertFalse(columnRangeFilter.isMaxColumnInclusive());
}
- @Test
+ @Test (timeout=60000)
public void testDependentColumnFilter() throws IOException {
String filterString = "DependentColumnFilter('family', 'qualifier', true, =, 'binary:abc')";
DependentColumnFilter dependentColumnFilter =
@@ -309,7 +309,7 @@ public class TestParseFilter {
assertEquals("abc", new String(binaryComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testSingleColumnValueFilter() throws IOException {
String filterString = "SingleColumnValueFilter " +
"('family', 'qualifier', >=, 'binary:a', true, false)";
@@ -338,7 +338,7 @@ public class TestParseFilter {
assertTrue(singleColumnValueFilter.getLatestVersionOnly());
}
- @Test
+ @Test (timeout=60000)
public void testSingleColumnValueExcludeFilter() throws IOException {
String filterString =
"SingleColumnValueExcludeFilter ('family', 'qualifier', <, 'binaryprefix:a')";
@@ -367,7 +367,7 @@ public class TestParseFilter {
assertFalse(singleColumnValueExcludeFilter.getLatestVersionOnly());
}
- @Test
+ @Test (timeout=60000)
public void testSkipFilter() throws IOException {
String filterString = "SKIP ValueFilter( =, 'binary:0')";
SkipFilter skipFilter =
@@ -381,7 +381,7 @@ public class TestParseFilter {
assertEquals("0", new String(binaryComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testWhileFilter() throws IOException {
String filterString = " WHILE RowFilter ( !=, 'binary:row1')";
WhileMatchFilter whileMatchFilter =
@@ -395,7 +395,7 @@ public class TestParseFilter {
assertEquals("row1", new String(binaryComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testCompoundFilter1() throws IOException {
String filterString = " (PrefixFilter ('realtime')AND FirstKeyOnlyFilter())";
FilterList filterList =
@@ -410,7 +410,7 @@ public class TestParseFilter {
FirstKeyOnlyFilter firstKeyOnlyFilter = (FirstKeyOnlyFilter) filters.get(1);
}
- @Test
+ @Test (timeout=60000)
public void testCompoundFilter2() throws IOException {
String filterString = "(PrefixFilter('realtime') AND QualifierFilter (>=, 'binary:e'))" +
"OR FamilyFilter (=, 'binary:qualifier') ";
@@ -445,7 +445,7 @@ public class TestParseFilter {
assertEquals("e", new String(binaryComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testCompoundFilter3() throws IOException {
String filterString = " ColumnPrefixFilter ('realtime')AND " +
"FirstKeyOnlyFilter() OR SKIP FamilyFilter(=, 'substring:hihi')";
@@ -479,7 +479,7 @@ public class TestParseFilter {
assertEquals("hihi", new String(substringComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testCompoundFilter4() throws IOException {
String filterString = " ColumnPrefixFilter ('realtime') OR " +
"FirstKeyOnlyFilter() OR SKIP FamilyFilter(=, 'substring:hihi')";
@@ -508,7 +508,7 @@ public class TestParseFilter {
assertEquals("hihi", new String(substringComparator.getValue()));
}
- @Test
+ @Test (timeout=60000)
public void testIncorrectCompareOperator() throws IOException {
String filterString = "RowFilter ('>>' , 'binary:region')";
try {
@@ -519,7 +519,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testIncorrectComparatorType () throws IOException {
String filterString = "RowFilter ('>=' , 'binaryoperator:region')";
try {
@@ -547,7 +547,7 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testPrecedence1() throws IOException {
String filterString = " (PrefixFilter ('realtime')AND FirstKeyOnlyFilter()" +
" OR KeyOnlyFilter())";
@@ -570,7 +570,7 @@ public class TestParseFilter {
assertEquals(new String(prefix), "realtime");
}
- @Test
+ @Test (timeout=60000)
public void testPrecedence2() throws IOException {
String filterString = " PrefixFilter ('realtime')AND SKIP FirstKeyOnlyFilter()" +
"OR KeyOnlyFilter()";
@@ -595,7 +595,7 @@ public class TestParseFilter {
assertTrue(skipFilter.getFilter() instanceof FirstKeyOnlyFilter);
}
- @Test
+ @Test (timeout=60000)
public void testUnescapedQuote1 () throws IOException {
String filterString = "InclusiveStopFilter ('row''3')";
InclusiveStopFilter inclusiveStopFilter =
@@ -604,7 +604,7 @@ public class TestParseFilter {
assertEquals(new String(stopRowKey), "row'3");
}
- @Test
+ @Test (timeout=60000)
public void testUnescapedQuote2 () throws IOException {
String filterString = "InclusiveStopFilter ('row''3''')";
InclusiveStopFilter inclusiveStopFilter =
@@ -613,7 +613,7 @@ public class TestParseFilter {
assertEquals(new String(stopRowKey), "row'3'");
}
- @Test
+ @Test (timeout=60000)
public void testUnescapedQuote3 () throws IOException {
String filterString = " InclusiveStopFilter ('''')";
InclusiveStopFilter inclusiveStopFilter =
@@ -622,7 +622,7 @@ public class TestParseFilter {
assertEquals(new String(stopRowKey), "'");
}
- @Test
+ @Test (timeout=60000)
public void testIncorrectFilterString () throws IOException {
String filterString = "()";
byte [] filterStringAsByteArray = Bytes.toBytes(filterString);
@@ -634,14 +634,14 @@ public class TestParseFilter {
}
}
- @Test
+ @Test (timeout=60000)
public void testCorrectFilterString () throws IOException {
String filterString = "(FirstKeyOnlyFilter())";
FirstKeyOnlyFilter firstKeyOnlyFilter =
doTestFilter(filterString, FirstKeyOnlyFilter.class);
}
- @Test
+ @Test (timeout=60000)
public void testRegisterFilter() {
ParseFilter.registerFilter("MyFilter", "some.class");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
index 02a55ba..22e69e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
@@ -41,17 +41,17 @@ public class TestPrefixFilter {
this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX));
}
- @Test
+ @Test (timeout=60000)
public void testPrefixOnRow() throws Exception {
prefixRowTests(mainFilter);
}
- @Test
+ @Test (timeout=60000)
public void testPrefixOnRowInsideWhileMatchRow() throws Exception {
prefixRowTests(new WhileMatchFilter(this.mainFilter), true);
}
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
byte[] buffer = mainFilter.toByteArray();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java
index 8effca5..2560f78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java
@@ -42,7 +42,7 @@ public class TestRandomRowFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testBasics() throws Exception {
int included = 0;
int max = 1000000;
@@ -65,7 +65,7 @@ public class TestRandomRowFilter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
RandomRowFilter newFilter = serializationTest(quarterChanceFilter);
// use epsilon float comparison
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java
index 9dbe432..35ef629 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({FilterTests.class, SmallTests.class})
public class TestRegexComparator {
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
// Default engine is the Java engine
RegexStringComparator a = new RegexStringComparator("a|b");
@@ -47,7 +47,7 @@ public class TestRegexComparator {
assertTrue(b.getEngine() instanceof RegexStringComparator.JoniRegexEngine);
}
- @Test
+ @Test (timeout=60000)
public void testJavaEngine() throws Exception {
for (TestCase t: TEST_CASES) {
boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JAVA)
@@ -57,7 +57,7 @@ public class TestRegexComparator {
}
}
- @Test
+ @Test (timeout=60000)
public void testJoniEngine() throws Exception {
for (TestCase t: TEST_CASES) {
boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JONI)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
index 3be10ec..2e66075 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
@@ -47,7 +47,7 @@ public class TestScanRowPrefix extends FilterTestingCluster {
private static final Log LOG = LogFactory
.getLog(TestScanRowPrefix.class);
- @Test
+ @Test (timeout=180000)
public void testPrefixScanning() throws IOException {
TableName tableName = TableName.valueOf("prefixScanning");
createTable(tableName,"F");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
index 7aa298c..248ec99 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
@@ -51,7 +51,7 @@ public class TestSingleColumnValueExcludeFilter {
* Test the overridden functionality of filterKeyValue(KeyValue)
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testFilterKeyValue() throws Exception {
Filter filter = new SingleColumnValueExcludeFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
CompareOp.EQUAL, VAL_1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
index b4e364d..f4debe7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
@@ -96,7 +96,7 @@ public class TestSingleColumnValueFilter {
new RegexStringComparator(pattern.pattern(), pattern.flags()));
}
- @Test
+ @Test (timeout=60000)
public void testLongComparator() throws IOException {
Filter filter = new SingleColumnValueFilter(COLUMN_FAMILY,
COLUMN_QUALIFIER, CompareOp.GREATER, new LongComparator(100L));
@@ -201,7 +201,7 @@ public class TestSingleColumnValueFilter {
* Tests identification of the stop row
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testStop() throws Exception {
basicFilterTests((SingleColumnValueFilter) basicFilter);
nullFilterTests(nullFilter);
@@ -214,7 +214,7 @@ public class TestSingleColumnValueFilter {
* Tests serialization
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSerialization() throws Exception {
Filter newFilter = serializationTest(basicFilter);
basicFilterTests((SingleColumnValueFilter)newFilter);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 613d1ea..6eb318c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -111,7 +111,7 @@ public class TestBlockReorder {
/**
* Test that we're can add a hook, and that this hook works when we try to read the file in HDFS.
*/
- @Test
+ @Test (timeout=300000)
public void testBlockLocationReorder() throws Exception {
Path p = new Path("hello");
@@ -414,7 +414,7 @@ public class TestBlockReorder {
/**
* Test that the reorder algo works as we expect.
*/
- @Test
+ @Test (timeout=300000)
public void testBlockLocation() throws Exception {
// We need to start HBase to get HConstants.HBASE_DIR set in conf
htu.startMiniZKCluster();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
index b06dea1..0235d82 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java
@@ -105,7 +105,7 @@ public class TestGlobalFilter extends HttpServerFunctionalTest {
}
}
- @Test
+ @Test (timeout=60000)
public void testServletFilter() throws Exception {
Configuration conf = new Configuration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
index 82fbe04..ac6a576 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHtmlQuoting.java
@@ -69,7 +69,7 @@ public class TestHtmlQuoting {
}
- @Test
+ @Test (timeout=60000)
public void testRequestQuoting() throws Exception {
HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
HttpServer.QuotingInputFilter.RequestQuoter quoter =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
index 8fea254..8d101b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLog.java
@@ -32,13 +32,13 @@ import static org.junit.Assert.assertNull;
@Category({MiscTests.class, SmallTests.class})
public class TestHttpRequestLog {
- @Test
+ @Test (timeout=60000)
public void testAppenderUndefined() {
RequestLog requestLog = HttpRequestLog.getRequestLog("test");
assertNull("RequestLog should be null", requestLog);
}
- @Test
+ @Test (timeout=60000)
public void testAppenderDefined() {
HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
requestLogAppender.setName("testrequestlog");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
index a17b9e9..ff37558 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpRequestLogAppender.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertEquals;
@Category({MiscTests.class, SmallTests.class})
public class TestHttpRequestLogAppender {
- @Test
+ @Test (timeout=60000)
public void testParameterPropagation() {
HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index ffb924c..29b6424 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -236,7 +236,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
- @Test
+ @Test (timeout=60000)
@Ignore
public void testContentTypes() throws Exception {
// Static CSS files should have text/css
@@ -359,7 +359,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
* enabled.
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
@Ignore
public void testDisabledAuthorizationOfDefaultServlets() throws Exception {
@@ -396,7 +396,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
@Ignore
public void testAuthorizationOfDefaultServlets() throws Exception {
Configuration conf = new Configuration();
@@ -437,7 +437,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
myServer.stop();
}
- @Test
+ @Test (timeout=60000)
public void testRequestQuoterWithNull() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.doReturn(null).when(request).getParameterValues("dummy");
@@ -447,7 +447,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
+ "when there are no values for the parameter", null, parameterValues);
}
- @Test
+ @Test (timeout=60000)
public void testRequestQuoterWithNotNull() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
String[] values = new String[] { "abc", "def" };
@@ -473,7 +473,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
LOG.info("END testJersey()");
}
- @Test
+ @Test (timeout=60000)
public void testHasAdministratorAccess() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
@@ -514,7 +514,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
}
- @Test
+ @Test (timeout=60000)
public void testRequiresAuthorizationAccess() throws Exception {
Configuration conf = new Configuration();
ServletContext context = Mockito.mock(ServletContext.class);
@@ -588,7 +588,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
return server;
}
- @Test
+ @Test (timeout=60000)
public void testNoCacheHeader() throws Exception {
URL url = new URL(baseUrl, "/echo?a=b&c=d");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
@@ -603,7 +603,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
/**
* HTTPServer.Builder should proceed if a external connector is available.
*/
- @Test
+ @Test (timeout=60000)
public void testHttpServerBuilderWithExternalConnector() throws Exception {
Connector c = mock(Connector.class);
doReturn("localhost").when(c).getHost();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
index 2fb51ea..e6220dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerLifecycle.java
@@ -61,7 +61,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
*
* @throws Throwable on failure
*/
- @Test
+ @Test (timeout=60000)
public void testStartedServerIsAlive() throws Throwable {
HttpServer server = null;
server = createTestServer();
@@ -114,7 +114,7 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
* @throws Throwable
* on failure
*/
- @Test
+ @Test (timeout=60000)
public void testWepAppContextAfterServerStop() throws Throwable {
HttpServer server = null;
String key = "test.attribute.key";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
index db394a8..741f7f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java
@@ -37,7 +37,7 @@ public class TestHttpServerWebapps extends HttpServerFunctionalTest {
* Test that the test server is loadable on the classpath
* @throws Throwable if something went wrong
*/
- @Test
+ @Test (timeout=60000)
public void testValidServerResource() throws Throwable {
HttpServer server = null;
try {
@@ -51,7 +51,7 @@ public class TestHttpServerWebapps extends HttpServerFunctionalTest {
* Test that an invalid webapp triggers an exception
* @throws Throwable if something went wrong
*/
- @Test
+ @Test (timeout=60000)
public void testMissingServerResource() throws Throwable {
try {
HttpServer server = createServer("NoSuchWebapp");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
index 5854ea2..2ca6fe0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java
@@ -106,7 +106,7 @@ public class TestPathFilter extends HttpServerFunctionalTest {
}
}
- @Test
+ @Test (timeout=60000)
public void testPathSpecFilters() throws Exception {
Configuration conf = new Configuration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
index 1b79aff..6160f00 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java
@@ -103,7 +103,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
clientSslFactory.destroy();
}
- @Test
+ @Test (timeout=60000)
public void testEcho() throws Exception {
assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
assertEquals("a:b\nc<:d\ne:>\n", readOut(new URL(baseUrl,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
index f9857e4..e53fe4a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java
@@ -105,7 +105,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
}
}
- @Test
+ @Test (timeout=60000)
@Ignore
//From stack
// Its a 'foreign' test, one that came in from hadoop when we copy/pasted http
@@ -172,7 +172,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
}
}
- @Test
+ @Test (timeout=60000)
public void testServletFilterWhenInitThrowsException() throws Exception {
Configuration conf = new Configuration();
// start a http server with ErrorFilter
@@ -191,7 +191,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
* Similar to the above test case, except that it uses a different API to add the
* filter. Regression test for HADOOP-8786.
*/
- @Test
+ @Test (timeout=60000)
public void testContextSpecificServletFilterWhenInitThrowsException()
throws Exception {
Configuration conf = new Configuration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
index 0385355..253e875 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/conf/TestConfServlet.java
@@ -53,7 +53,7 @@ public class TestConfServlet extends TestCase {
return testConf;
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("unchecked")
public void testWriteJson() throws Exception {
StringWriter sw = new StringWriter();
@@ -76,7 +76,7 @@ public class TestConfServlet extends TestCase {
assertTrue(foundSetting);
}
- @Test
+ @Test (timeout=60000)
public void testWriteXml() throws Exception {
StringWriter sw = new StringWriter();
ConfServlet.writeResponse(getTestConf(), sw, "xml");
@@ -102,7 +102,7 @@ public class TestConfServlet extends TestCase {
assertTrue(foundSetting);
}
- @Test
+ @Test (timeout=60000)
public void testBadFormat() throws Exception {
StringWriter sw = new StringWriter();
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
index 81bcbd5..b369c25 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/lib/TestStaticUserWebFilter.java
@@ -46,7 +46,7 @@ public class TestStaticUserWebFilter {
return mock;
}
- @Test
+ @Test (timeout=60000)
public void testFilter() throws Exception {
FilterConfig config = mockConfig("myuser");
StaticUserFilter suf = new StaticUserFilter();
@@ -69,14 +69,14 @@ public class TestStaticUserWebFilter {
suf.destroy();
}
- @Test
+ @Test (timeout=60000)
public void testOldStyleConfiguration() {
Configuration conf = new Configuration();
conf.set("dfs.web.ugi", "joe,group1,group2");
assertEquals("joe", StaticUserWebFilter.getUsernameFromConf(conf));
}
- @Test
+ @Test (timeout=60000)
public void testConfiguration() {
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, "dr.stack");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
index 777b3cd..fee1eaf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
@@ -50,7 +50,7 @@ public class TestFileLink {
* Test, on HDFS, that the FileLink is still readable
* even when the current file gets renamed.
*/
- @Test
+ @Test (timeout=180000)
public void testHDFSLinkReadDuringRename() throws Exception {
HBaseTestingUtility testUtil = new HBaseTestingUtility();
Configuration conf = testUtil.getConfiguration();
@@ -73,7 +73,7 @@ public class TestFileLink {
* Test, on a local filesystem, that the FileLink is still readable
* even when the current file gets renamed.
*/
- @Test
+ @Test (timeout=180000)
public void testLocalLinkReadDuringRename() throws IOException {
HBaseTestingUtility testUtil = new HBaseTestingUtility();
FileSystem fs = testUtil.getTestFileSystem();
@@ -145,7 +145,7 @@ public class TestFileLink {
* a query to the namenode is performed, using the filename,
* and the deleted file doesn't exists anymore (FileNotFoundException).
*/
- @Test
+ @Test (timeout=180000)
public void testHDFSLinkReadDuringDelete() throws Exception {
HBaseTestingUtility testUtil = new HBaseTestingUtility();
Configuration conf = testUtil.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
index f2b26c1..015a661 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
@@ -42,7 +42,7 @@ import static org.junit.Assert.assertTrue;
@Category({IOTests.class, SmallTests.class})
public class TestHFileLink {
- @Test
+ @Test (timeout=60000)
public void testValidLinkNames() {
String validLinkNames[] = {"foo=fefefe-0123456", "ns=foo=abababa-fefefefe"};
@@ -82,7 +82,7 @@ public class TestHFileLink {
}
}
- @Test
+ @Test (timeout=60000)
public void testBackReference() {
Path rootDir = new Path("/root");
Path archiveDir = new Path(rootDir, ".archive");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index 18595a8..6276b28 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -77,7 +77,7 @@ public class TestHalfStoreFileReader {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testHalfScanAndReseek() throws IOException {
String root_dir = TEST_UTIL.getDataTestDir().toString();
Path p = new Path(root_dir, "test");
@@ -143,7 +143,7 @@ public class TestHalfStoreFileReader {
// Tests the scanner on an HFile that is backed by HalfStoreFiles
- @Test
+ @Test (timeout=60000)
public void testHalfScanner() throws IOException {
String root_dir = TEST_UTIL.getDataTestDir().toString();
Path p = new Path(root_dir, "test");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index d6423e8..0d8d74e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -86,7 +86,7 @@ public class TestHeapSize {
/**
* Test our hard-coded sizing of native java objects
*/
- @Test
+ @Test (timeout=60000)
public void testNativeSizes() throws IOException {
Class<?> cl;
long expected;
@@ -253,7 +253,7 @@ public class TestHeapSize {
* TestHFile since it is a non public class
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testSizes() throws IOException {
Class<?> cl;
long expected;
@@ -357,7 +357,7 @@ public class TestHeapSize {
// any of these classes are modified without updating overhead sizes.
}
- @Test
+ @Test (timeout=60000)
public void testMutations(){
Class<?> cl;
long expected;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestReference.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestReference.java
index 80295ff..f5b5b07 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestReference.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestReference.java
@@ -40,7 +40,7 @@ public class TestReference {
* Exercises the code path that parses Writables.
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testParsingWritableReference() throws IOException {
// Read a Reference written w/ 0.94 out of the test data dir.
final String datafile = System.getProperty("project.build.testSourceDirectory", "src/test") +
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
index 9330cea..c557d09 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
@@ -26,7 +26,7 @@ import org.junit.experimental.categories.Category;
@Category({IOTests.class, MediumTests.class})
public class TestBufferedDataBlockEncoder {
- @Test
+ @Test (timeout=180000)
public void testEnsureSpaceForKey() {
BufferedDataBlockEncoder.SeekerState state =
new BufferedDataBlockEncoder.SeekerState();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index 918e0f1..67bfb56 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -238,7 +238,7 @@ public class TestChangingEncoding {
LOG.debug("Compaction queue size reached 0, continuing");
}
- @Test
+ @Test (timeout=300000)
public void testCrazyRandomChanges() throws Exception {
prepareTest("RandomChanges");
Random rand = new Random(2934298742974297L);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index cabb67f..e994fca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -102,7 +102,7 @@ public class TestDataBlockEncoders {
* @throws IOException
* On test failure.
*/
- @Test
+ @Test (timeout=300000)
public void testEmptyKeyValues() throws IOException {
List<KeyValue> kvList = new ArrayList<KeyValue>();
byte[] row = new byte[0];
@@ -129,7 +129,7 @@ public class TestDataBlockEncoders {
* @throws IOException
* On test failure.
*/
- @Test
+ @Test (timeout=300000)
public void testNegativeTimestamps() throws IOException {
List<KeyValue> kvList = new ArrayList<KeyValue>();
byte[] row = new byte[0];
@@ -156,7 +156,7 @@ public class TestDataBlockEncoders {
* pseudorandom sample.
* @throws IOException On test failure.
*/
- @Test
+ @Test (timeout=300000)
public void testExecutionOnSample() throws IOException {
List<KeyValue> kvList = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
@@ -165,7 +165,7 @@ public class TestDataBlockEncoders {
/**
* Test seeking while file is encoded.
*/
- @Test
+ @Test (timeout=300000)
public void testSeekingOnSample() throws IOException {
List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
@@ -234,7 +234,7 @@ public class TestDataBlockEncoders {
return ByteBuffer.wrap(encodedData);
}
- @Test
+ @Test (timeout=300000)
public void testNextOnSample() throws IOException {
List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
@@ -288,7 +288,7 @@ public class TestDataBlockEncoders {
* Test whether the decompression of first key is implemented correctly.
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testFirstKeyInBlockOnSample() throws IOException {
List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
@@ -379,7 +379,7 @@ public class TestDataBlockEncoders {
}
}
- @Test
+ @Test (timeout=300000)
public void testZeroByte() throws IOException {
List<KeyValue> kvList = new ArrayList<KeyValue>();
byte[] row = Bytes.toBytes("abcd");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index e087457..d26bc9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -97,7 +97,7 @@ public class TestEncodedSeekers {
this.compressTags = compressTags;
}
- @Test
+ @Test (timeout=180000)
public void testEncodedSeeker() throws IOException {
System.err.println("Testing encoded seekers for encoding : " + encoding + ", includeTags : "
+ includeTags + ", compressTags : " + compressTags);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
index 80a50b0..b996b31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
@@ -83,7 +83,7 @@ public class TestPrefixTree {
testUtil.cleanupTestDir();
}
- @Test
+ @Test (timeout=60000)
public void testHBASE11728() throws Exception {
Put put = new Put(Bytes.toBytes("a-b-0-0"));
put.add(fam, qual1, Bytes.toBytes("c1-value"));
@@ -173,7 +173,7 @@ public class TestPrefixTree {
scanner.close();
}
- @Test
+ @Test (timeout=60000)
public void testHBASE12817() throws IOException {
for (int i = 0; i < 100; i++) {
region.put(new Put(Bytes.toBytes("obj" + (2900 + i))).add(fam, qual1, Bytes.toBytes(i)));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
index ee664bd..97f340b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
@@ -94,7 +94,7 @@ public class TestPrefixTreeEncoding {
formatRowNum = false;
}
- @Test
+ @Test (timeout=60000)
public void testSeekBeforeWithFixedData() throws Exception {
formatRowNum = true;
PrefixTreeCodec encoder = new PrefixTreeCodec();
@@ -142,7 +142,7 @@ public class TestPrefixTreeEncoding {
assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), seeker.getKeyValue().getRow());
}
- @Test
+ @Test (timeout=60000)
public void testScanWithRandomData() throws Exception {
PrefixTreeCodec encoder = new PrefixTreeCodec();
ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
@@ -180,7 +180,7 @@ public class TestPrefixTreeEncoding {
} while (seeker.next());
}
- @Test
+ @Test (timeout=60000)
public void testSeekWithRandomData() throws Exception {
PrefixTreeCodec encoder = new PrefixTreeCodec();
ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
@@ -203,7 +203,7 @@ public class TestPrefixTreeEncoding {
verifySeeking(seeker, readBuffer, batchId);
}
- @Test
+ @Test (timeout=60000)
public void testSeekWithFixedData() throws Exception {
PrefixTreeCodec encoder = new PrefixTreeCodec();
int batchId = numBatchesWritten++;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
index c053449..8381dc0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java
@@ -42,7 +42,7 @@ public class TestSeekToBlockWithEncoders {
/**
* Test seeking while file is encoded.
*/
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithNonMatchingSeekKey() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -68,7 +68,7 @@ public class TestSeekToBlockWithEncoders {
/**
* Test seeking while file is encoded.
*/
- @Test
+ @Test (timeout=60000)
public void testSeekingToBlockWithBiggerNonLength1() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -94,7 +94,7 @@ public class TestSeekToBlockWithEncoders {
/**
* Test seeking while file is encoded.
*/
- @Test
+ @Test (timeout=60000)
public void testSeekingToBlockToANotAvailableKey() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -120,7 +120,7 @@ public class TestSeekToBlockWithEncoders {
/**
* Test seeking while file is encoded.
*/
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDecreasingCommonPrefix() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("row10aaa"), Bytes.toBytes("f1"),
@@ -140,7 +140,7 @@ public class TestSeekToBlockWithEncoders {
seekToTheKey(kv3, sampleKv, toSeek);
}
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDiffQualifer() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -160,7 +160,7 @@ public class TestSeekToBlockWithEncoders {
seekToTheKey(kv5, sampleKv, toSeek);
}
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDiffQualiferOnSameRow() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -183,7 +183,7 @@ public class TestSeekToBlockWithEncoders {
seekToTheKey(kv6, sampleKv, toSeek);
}
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDiffQualiferOnSameRow1() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"),
@@ -206,7 +206,7 @@ public class TestSeekToBlockWithEncoders {
seekToTheKey(kv5, sampleKv, toSeek);
}
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDiffQualiferOnSameRowButDescendingInSize() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("qual1"),
@@ -229,7 +229,7 @@ public class TestSeekToBlockWithEncoders {
seekToTheKey(kv6, sampleKv, toSeek);
}
- @Test
+ @Test (timeout=60000)
public void testSeekToBlockWithDiffFamilyAndQualifer() throws IOException {
List<KeyValue> sampleKv = new ArrayList<KeyValue>();
KeyValue kv1 = new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("fam1"), Bytes.toBytes("q1"),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
index 4080249..42b3bac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
@@ -76,7 +76,7 @@ public class TestBlockCacheReporting {
bc.getStats().getEvictedCount();
}
- @Test
+ @Test (timeout=60000)
public void testBucketCache() throws JsonGenerationException, JsonMappingException, IOException {
this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);
@@ -94,7 +94,7 @@ public class TestBlockCacheReporting {
LOG.info(BlockCacheUtil.toJSON(cbsbf));
}
- @Test
+ @Test (timeout=60000)
public void testLruBlockCache() throws JsonGenerationException, JsonMappingException, IOException {
CacheConfig cc = new CacheConfig(this.conf);
assertTrue(cc.isBlockCacheEnabled());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
index c5fcc3c..9e3faff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
@@ -203,7 +203,7 @@ public class TestCacheConfig {
return cc.getBlockCache().getBlockCount();
}
- @Test
+ @Test (timeout=300000)
public void testCacheConfigDefaultLRUBlockCache() {
CacheConfig cc = new CacheConfig(this.conf);
assertTrue(cc.isBlockCacheEnabled());
@@ -215,19 +215,19 @@ public class TestCacheConfig {
/**
* Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.
*/
- @Test
+ @Test (timeout=300000)
public void testOffHeapBucketCacheConfig() {
this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
doBucketCacheConfigTest();
}
- @Test
+ @Test (timeout=300000)
public void testOnHeapBucketCacheConfig() {
this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "heap");
doBucketCacheConfigTest();
}
- @Test
+ @Test (timeout=300000)
public void testFileBucketCacheConfig() throws IOException {
HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);
try {
@@ -318,7 +318,7 @@ public class TestCacheConfig {
* Test the cacheDataInL1 flag. When set, data blocks should be cached in the l1 tier, up in
* LruBlockCache when using CombinedBlockCcahe.
*/
- @Test
+ @Test (timeout=300000)
public void testCacheDataInL1() {
this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 7ec7e08..9fc2286 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -481,13 +481,13 @@ public class TestCacheOnWrite {
region.close();
}
- @Test
+ @Test (timeout=180000)
public void testStoreFileCacheOnWrite() throws IOException {
testStoreFileCacheOnWriteInternals(false);
testStoreFileCacheOnWriteInternals(true);
}
- @Test
+ @Test (timeout=180000)
public void testNotCachingDataBlocksDuringCompaction() throws IOException, InterruptedException {
testNotCachingDataBlocksDuringCompactionInternals(false);
testNotCachingDataBlocksDuringCompactionInternals(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
index 80266af..af00c3f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
@@ -72,7 +72,7 @@ public class TestChecksum {
* Introduce checksum failures and check that we can still read
* the data
*/
- @Test
+ @Test (timeout=60000)
public void testChecksumCorruption() throws IOException {
testChecksumCorruptionInternals(false);
testChecksumCorruptionInternals(true);
@@ -180,7 +180,7 @@ public class TestChecksum {
/**
* Test different values of bytesPerChecksum
*/
- @Test
+ @Test (timeout=60000)
public void testChecksumChunks() throws IOException {
testChecksumInternals(false);
testChecksumInternals(true);
@@ -259,7 +259,7 @@ public class TestChecksum {
/**
* Test to ensure that these is at least one valid checksum implementation
*/
- @Test
+ @Test (timeout=60000)
public void testChecksumAlgorithm() throws IOException {
ChecksumType type = ChecksumType.CRC32;
assertEquals(ChecksumType.nameToType(type.getName()), type);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
index 1b6731a..138cee0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
@@ -87,7 +87,7 @@ public class TestFixedFileTrailer {
fs = FileSystem.get(util.getConfiguration());
}
- @Test
+ @Test (timeout=60000)
public void testTrailer() throws IOException {
FixedFileTrailer t = new FixedFileTrailer(version,
HFileReaderV2.PBUF_TRAILER_MINOR_VERSION);
@@ -168,7 +168,7 @@ public class TestFixedFileTrailer {
assertEquals(trailerStr, t4.toString());
}
- @Test
+ @Test (timeout=60000)
public void testTrailerForV2NonPBCompatibility() throws Exception {
if (version == 2) {
FixedFileTrailer t = new FixedFileTrailer(version,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index 2af3a6e..1996678 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -102,7 +102,7 @@ public class TestForceCacheImportantBlocks {
HFile.dataBlockReadCnt.set(0);
}
- @Test
+ @Test (timeout=180000)
public void testCacheBlocks() throws IOException {
// Set index block size to be the same as normal block size.
TEST_UTIL.getConfiguration().setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, BLOCK_SIZE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
index 3855629..70e3749 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
@@ -86,7 +86,7 @@ public class TestHFile extends HBaseTestCase {
* Test all features work reasonably when hfile is empty of entries.
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testEmptyHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@@ -103,7 +103,7 @@ public class TestHFile extends HBaseTestCase {
/**
* Create 0-length hfile and show that it fails
*/
- @Test
+ @Test (timeout=60000)
public void testCorrupt0LengthHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@@ -137,7 +137,7 @@ public class TestHFile extends HBaseTestCase {
/**
* Create a truncated hfile and verify that exception thrown.
*/
- @Test
+ @Test (timeout=60000)
public void testCorruptTruncatedHFile() throws IOException {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
Path f = new Path(ROOT_DIR, getName());
@@ -287,13 +287,13 @@ public class TestHFile extends HBaseTestCase {
fs.delete(ncTFile, true);
}
- @Test
+ @Test (timeout=60000)
public void testTFileFeatures() throws IOException {
testTFilefeaturesInternals(false);
testTFilefeaturesInternals(true);
}
- @Test
+ @Test (timeout=60000)
protected void testTFilefeaturesInternals(boolean useTags) throws IOException {
basicWithSomeCodec("none", useTags);
basicWithSomeCodec("gz", useTags);
@@ -361,13 +361,13 @@ public class TestHFile extends HBaseTestCase {
}
// test meta blocks for tfiles
- @Test
+ @Test (timeout=60000)
public void testMetaBlocks() throws Exception {
metablocks("none");
metablocks("gz");
}
- @Test
+ @Test (timeout=60000)
public void testNullMetaBlocks() throws Exception {
if (cacheConf == null) cacheConf = new CacheConfig(conf);
for (Compression.Algorithm compressAlgo :
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index eb1f1bb..1765d3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -232,7 +232,7 @@ public class TestHFileBlock {
return Bytes.toStringBinary(testV2Block);
}
- @Test
+ @Test (timeout=180000)
public void testNoCompression() throws IOException {
CacheConfig cacheConf = Mockito.mock(CacheConfig.class);
Mockito.when(cacheConf.isBlockCacheEnabled()).thenReturn(false);
@@ -244,7 +244,7 @@ public class TestHFileBlock {
assertTrue(block.isUnpacked());
}
- @Test
+ @Test (timeout=180000)
public void testGzipCompression() throws IOException {
final String correctTestBlockStr =
"DATABLK*\\x00\\x00\\x00>\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF"
@@ -273,7 +273,7 @@ public class TestHFileBlock {
testBlockStr.substring(0, correctGzipBlockLength - 4));
}
- @Test
+ @Test (timeout=180000)
public void testReaderV2() throws IOException {
testReaderV2Internals();
}
@@ -353,7 +353,7 @@ public class TestHFileBlock {
* Test encoding/decoding data blocks.
* @throws IOException a bug or a problem with temporary files.
*/
- @Test
+ @Test (timeout=180000)
public void testDataBlockEncoding() throws IOException {
testInternals();
}
@@ -511,7 +511,7 @@ public class TestHFileBlock {
numBytes) + (numBytes < maxBytes ? "..." : "");
}
- @Test
+ @Test (timeout=180000)
public void testPreviousOffset() throws IOException {
testPreviousOffsetInternals();
}
@@ -703,7 +703,7 @@ public class TestHFileBlock {
}
- @Test
+ @Test (timeout=180000)
public void testConcurrentReading() throws Exception {
testConcurrentReadingInternals();
}
@@ -812,7 +812,7 @@ public class TestHFileBlock {
return totalSize;
}
- @Test
+ @Test (timeout=180000)
public void testBlockHeapSize() {
testBlockHeapSizeInternals();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
index fc44f3c..baa16d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
@@ -137,13 +137,13 @@ public class TestHFileBlockCompatibility {
return Bytes.toStringBinary(testV2Block);
}
- @Test
+ @Test (timeout=60000)
public void testNoCompression() throws IOException {
assertEquals(4000, createTestV2Block(NONE).getBlockForCaching().
getUncompressedSizeWithoutHeader());
}
- @Test
+ @Test (timeout=60000)
public void testGzipCompression() throws IOException {
final String correctTestBlockStr =
"DATABLK*\\x00\\x00\\x00:\\x00\\x00\\x0F\\xA0\\xFF\\xFF\\xFF\\xFF"
@@ -166,7 +166,7 @@ public class TestHFileBlockCompatibility {
assertEquals(correctTestBlockStr, returnedStr);
}
- @Test
+ @Test (timeout=60000)
public void testReaderV2() throws IOException {
if(includesTag) {
TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
@@ -238,7 +238,7 @@ public class TestHFileBlockCompatibility {
* Test encoding/decoding data blocks.
* @throws IOException a bug or a problem with temporary files.
*/
- @Test
+ @Test (timeout=60000)
public void testDataBlockEncoding() throws IOException {
if(includesTag) {
TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index 939c019..9e43727 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -120,7 +120,7 @@ public class TestHFileBlockIndex {
fs = HFileSystem.get(conf);
}
- @Test
+ @Test (timeout=180000)
public void testBlockIndex() throws IOException {
testBlockIndexInternals(false);
clear();
@@ -318,7 +318,7 @@ public class TestHFileBlockIndex {
return i * i * 37 + i * 19 + 13;
}
- @Test
+ @Test (timeout=180000)
public void testSecondaryIndexBinarySearch() throws IOException {
int numTotalKeys = 99;
assertTrue(numTotalKeys % 2 == 1); // Ensure no one made this even.
@@ -445,7 +445,7 @@ public class TestHFileBlockIndex {
}
- @Test
+ @Test (timeout=180000)
public void testBlockIndexChunk() throws IOException {
BlockIndexChunk c = new BlockIndexChunk();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -482,7 +482,7 @@ public class TestHFileBlockIndex {
}
/** Checks if the HeapSize calculator is within reason */
- @Test
+ @Test (timeout=180000)
public void testHeapSizeForBlockIndex() throws IOException {
Class cl =
HFileBlockIndex.BlockIndexReader.class;
@@ -510,7 +510,7 @@ public class TestHFileBlockIndex {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testHFileWriterAndReader() throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
"hfile_for_block_index");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index 3cdc92b..dc2bc6c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -68,7 +68,7 @@ public class TestHFileDataBlockEncoder {
* Test putting and taking out blocks into cache with different
* encoding options.
*/
- @Test
+ @Test (timeout=60000)
public void testEncodingWithCache() throws IOException {
testEncodingWithCacheInternals(false);
testEncodingWithCacheInternals(true);
@@ -102,7 +102,7 @@ public class TestHFileDataBlockEncoder {
}
/** Test for HBASE-5746. */
- @Test
+ @Test (timeout=60000)
public void testHeaderSizeInCacheWithoutChecksum() throws Exception {
testHeaderSizeInCacheWithoutChecksumInternals(false);
testHeaderSizeInCacheWithoutChecksumInternals(true);
@@ -135,7 +135,7 @@ public class TestHFileDataBlockEncoder {
* Test encoding.
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testEncoding() throws IOException {
testEncodingInternals(false);
testEncodingInternals(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
index c0683f8..eb3dff9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
@@ -46,7 +46,7 @@ public class TestHFileInlineToRootChunkConversion {
private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
private final Configuration conf = testUtil.getConfiguration();
- @Test
+ @Test (timeout=60000)
public void testWriteHFile() throws Exception {
Path hfPath = new Path(testUtil.getDataTestDir(),
TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
index 42e918a..6d9b7d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
@@ -75,7 +75,7 @@ public class TestHFileWriterV2 {
fs = FileSystem.get(conf);
}
- @Test
+ @Test (timeout=60000)
public void testHFileFormatV2() throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "testHFileFormatV2");
final Compression.Algorithm compressAlgo = Compression.Algorithm.GZ;
@@ -83,7 +83,7 @@ public class TestHFileWriterV2 {
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false);
}
- @Test
+ @Test (timeout=60000)
public void testMidKeyInHFile() throws IOException{
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
"testMidKeyInHFile");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index f96e8ef..8449d6c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -89,7 +89,7 @@ public class TestHFileWriterV3 {
fs = FileSystem.get(conf);
}
- @Test
+ @Test (timeout=60000)
public void testHFileFormatV3() throws IOException {
testHFileFormatV3Internals(useTags);
}
@@ -101,7 +101,7 @@ public class TestHFileWriterV3 {
writeDataAndReadFromHFile(hfilePath, compressAlgo, entryCount, false, useTags);
}
- @Test
+ @Test (timeout=60000)
public void testMidKeyInHFile() throws IOException{
testMidKeyInHFileInternals(useTags);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
index 2fd3684..63eb802 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
@@ -134,7 +134,7 @@ public class TestLazyDataBlockDecompression {
LOG.info("read " + Iterables.toString(blocks));
}
- @Test
+ @Test (timeout=60000)
public void testCompressionIncreasesEffectiveBlockCacheSize() throws Exception {
// enough room for 2 uncompressed block
int maxSize = (int) (HConstants.DEFAULT_BLOCKSIZE * 2.1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index ec60bcd..adc3652 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
public class TestLruBlockCache {
- @Test
+ @Test (timeout=60000)
public void testBackgroundEvictionThread() throws Exception {
long maxSize = 100000;
int numBlocks = 9;
@@ -93,7 +93,7 @@ public class TestLruBlockCache {
System.out.println("Background Evictions run: " + evictionCount);
}
- @Test
+ @Test (timeout=60000)
public void testCacheSimple() throws Exception {
long maxSize = 1000000;
@@ -152,7 +152,7 @@ public class TestLruBlockCache {
t.join();
}
- @Test
+ @Test (timeout=60000)
public void testCacheEvictionSimple() throws Exception {
long maxSize = 100000;
@@ -192,7 +192,7 @@ public class TestLruBlockCache {
}
}
- @Test
+ @Test (timeout=60000)
public void testCacheEvictionTwoPriorities() throws Exception {
long maxSize = 100000;
@@ -251,7 +251,7 @@ public class TestLruBlockCache {
}
}
- @Test
+ @Test (timeout=60000)
public void testCacheEvictionThreePriorities() throws Exception {
long maxSize = 100000;
@@ -372,7 +372,7 @@ public class TestLruBlockCache {
assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false, true));
}
- @Test
+ @Test (timeout=60000)
public void testCacheEvictionInMemoryForceMode() throws Exception {
long maxSize = 100000;
long blockSize = calculateBlockSize(maxSize, 10);
@@ -476,7 +476,7 @@ public class TestLruBlockCache {
}
// test scan resistance
- @Test
+ @Test (timeout=60000)
public void testScanResistance() throws Exception {
long maxSize = 100000;
@@ -540,7 +540,7 @@ public class TestLruBlockCache {
}
// test setMaxSize
- @Test
+ @Test (timeout=60000)
public void testResizeBlockCache() throws Exception {
long maxSize = 300000;
@@ -603,7 +603,7 @@ public class TestLruBlockCache {
}
// test metricsPastNPeriods
- @Test
+ @Test (timeout=60000)
public void testPastNPeriodsMetrics() throws Exception {
double delta = 0.01;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
index 141c95b..78c7a54 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruCachedBlock.java
@@ -47,7 +47,7 @@ public class TestLruCachedBlock {
blockNotEqual = new LruCachedBlock(cacheKey, cacheable, 1);
}
- @Test
+ @Test (timeout=60000)
public void testEquality() {
assertEquals(block.hashCode(), blockEqual.hashCode());
assertNotEquals(block.hashCode(), blockNotEqual.hashCode());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
index 3a0fdf7..4d5b1ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
@@ -44,7 +44,7 @@ public class TestReseekTo {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testReseekTo() throws Exception {
testReseekToInternals(TagUsage.NO_TAG);
testReseekToInternals(TagUsage.ONLY_TAG);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index e8f6c1b..35a7143 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -90,7 +90,7 @@ public class TestScannerSelectionUsingKeyRange {
TEST_UTIL.cleanupTestDir();
}
- @Test
+ @Test (timeout=60000)
public void testScannerSelection() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
conf.setInt("hbase.hstore.compactionThreshold", 10000);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 1c426e4..31de042 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -95,7 +95,7 @@ public class TestScannerSelectionUsingTTL {
this.explicitCompaction = explicitCompaction;
}
- @Test
+ @Test (timeout=180000)
public void testScannerSelection() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
conf.setBoolean("hbase.store.delete.expired.storefile", false);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index b9a126f..a29a864 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -98,7 +98,7 @@ public class TestSeekTo extends HBaseTestCase {
return ncTFile;
}
- @Test
+ @Test (timeout=60000)
public void testSeekBefore() throws Exception {
testSeekBeforeInternals(TagUsage.NO_TAG);
testSeekBeforeInternals(TagUsage.ONLY_TAG);
@@ -140,7 +140,7 @@ public class TestSeekTo extends HBaseTestCase {
reader.close();
}
- @Test
+ @Test (timeout=60000)
public void testSeekBeforeWithReSeekTo() throws Exception {
testSeekBeforeWithReSeekToInternals(TagUsage.NO_TAG);
testSeekBeforeWithReSeekToInternals(TagUsage.ONLY_TAG);
@@ -230,7 +230,7 @@ public class TestSeekTo extends HBaseTestCase {
assertEquals("k", toRowStr(scanner.getKeyValue()));
}
- @Test
+ @Test (timeout=60000)
public void testSeekTo() throws Exception {
testSeekToInternals(TagUsage.NO_TAG);
testSeekToInternals(TagUsage.ONLY_TAG);
@@ -260,7 +260,7 @@ public class TestSeekTo extends HBaseTestCase {
reader.close();
}
- @Test
+ @Test (timeout=60000)
public void testBlockContainingKey() throws Exception {
testBlockContainingKeyInternals(TagUsage.NO_TAG);
testBlockContainingKeyInternals(TagUsage.ONLY_TAG);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index d29be01..e62890d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -129,7 +129,7 @@ public class TestBucketCache {
return a.get(RAND.nextInt(a.size()));
}
- @Test
+ @Test (timeout=60000)
public void testBucketAllocator() throws BucketAllocatorException {
BucketAllocator mAllocator = cache.getAllocator();
/*
@@ -167,17 +167,17 @@ public class TestBucketCache {
assertEquals(0, mAllocator.getUsedSize());
}
- @Test
+ @Test (timeout=60000)
public void testCacheSimple() throws Exception {
CacheTestUtils.testCacheSimple(cache, BLOCK_SIZE, NUM_QUERIES);
}
- @Test
+ @Test (timeout=60000)
public void testCacheMultiThreadedSingleKey() throws Exception {
CacheTestUtils.hammerSingleKey(cache, BLOCK_SIZE, NUM_THREADS, NUM_QUERIES);
}
- @Test
+ @Test (timeout=60000)
public void testHeapSizeChanges() throws Exception {
cache.stopWriterThreads();
CacheTestUtils.testHeapSizeChanges(cache, BLOCK_SIZE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
index 4d3f550..e948f1c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
@@ -116,7 +116,7 @@ public class TestBucketWriterThread {
* Manually run the WriterThread.
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=60000)
public void testTooBigEntry() throws InterruptedException {
Cacheable tooBigCacheable = Mockito.mock(Cacheable.class);
Mockito.when(tooBigCacheable.getSerializedLength()).thenReturn(Integer.MAX_VALUE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
index 511f942..404e275 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestByteBufferIOEngine.java
@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
@Category({IOTests.class, SmallTests.class})
public class TestByteBufferIOEngine {
- @Test
+ @Test (timeout=60000)
public void testByteBufferIOEngine() throws Exception {
int capacity = 32 * 1024 * 1024; // 32 MB
int testNum = 100;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index 8306114..8feb779 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
*/
@Category({IOTests.class, SmallTests.class})
public class TestFileIOEngine {
- @Test
+ @Test (timeout=60000)
public void testFileIOEngine() throws IOException {
int size = 2 * 1024 * 1024; // 2 MB
String filePath = "testFileIOEngine";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
index 768871c..2b8c9e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
@@ -151,7 +151,7 @@ public class TestAsyncIPC extends AbstractTestIPC {
});
}
- @Test
+ @Test (timeout=60000)
public void testAsyncConnectionSetup() throws Exception {
TestRpcServer rpcServer = new TestRpcServer();
AsyncRpcClient client = createRpcClient(CONF);
@@ -187,7 +187,7 @@ public class TestAsyncIPC extends AbstractTestIPC {
}
}
- @Test
+ @Test (timeout=60000)
public void testRTEDuringAsyncConnectionSetup() throws Exception {
TestRpcServer rpcServer = new TestRpcServer();
AsyncRpcClient client = createRpcClientRTEDuringConnectionSetup(CONF);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java
index e8f6464..dce5193 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java
@@ -57,14 +57,14 @@ public class TestBufferChain {
tmpFile.delete();
}
- @Test
+ @Test (timeout=60000)
public void testGetBackBytesWePutIn() {
ByteBuffer[] bufs = wrapArrays(HELLO_WORLD_CHUNKS);
BufferChain chain = new BufferChain(bufs);
assertTrue(Bytes.equals(Bytes.toBytes("hello world"), chain.getBytes()));
}
- @Test
+ @Test (timeout=60000)
public void testChainChunkBiggerThanWholeArray() throws IOException {
ByteBuffer[] bufs = wrapArrays(HELLO_WORLD_CHUNKS);
BufferChain chain = new BufferChain(bufs);
@@ -72,7 +72,7 @@ public class TestBufferChain {
assertNoRemaining(bufs);
}
- @Test
+ @Test (timeout=60000)
public void testChainChunkBiggerThanSomeArrays() throws IOException {
ByteBuffer[] bufs = wrapArrays(HELLO_WORLD_CHUNKS);
BufferChain chain = new BufferChain(bufs);
@@ -80,7 +80,7 @@ public class TestBufferChain {
assertNoRemaining(bufs);
}
- @Test
+ @Test (timeout=60000)
public void testLimitOffset() throws IOException {
ByteBuffer[] bufs = new ByteBuffer[] {
stringBuf("XXXhelloYYY", 3, 5),
@@ -91,7 +91,7 @@ public class TestBufferChain {
assertNoRemaining(bufs);
}
- @Test
+ @Test (timeout=60000)
public void testWithSpy() throws IOException {
ByteBuffer[] bufs = new ByteBuffer[] {
stringBuf("XXXhelloYYY", 3, 5),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCallRunner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCallRunner.java
index be16529..d7d1ec6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCallRunner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCallRunner.java
@@ -29,7 +29,7 @@ public class TestCallRunner {
/**
* Does nothing but exercise a {@link CallRunner} outside of {@link RpcServer} context.
*/
- @Test
+ @Test (timeout=60000)
public void testSimpleCall() {
RpcServerInterface mockRpcServer = Mockito.mock(RpcServerInterface.class);
Mockito.when(mockRpcServer.isStarted()).thenReturn(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
index 961001f..b76d101 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
@@ -282,7 +282,7 @@ public class TestDelayedRpc {
}
}
- @Test
+ @Test (timeout=180000)
public void testEndDelayThrowing() throws IOException {
Configuration conf = HBaseConfiguration.create();
InetSocketAddress isa = new InetSocketAddress("localhost", 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestGlobalEventLoopGroup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestGlobalEventLoopGroup.java
index 60dbd1b..2cc89d7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestGlobalEventLoopGroup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestGlobalEventLoopGroup.java
@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
@Category({ RPCTests.class, SmallTests.class })
public class TestGlobalEventLoopGroup {
- @Test
+ @Test (timeout=60000)
public void test() {
Configuration conf = HBaseConfiguration.create();
conf.setBoolean(AsyncRpcClient.USE_GLOBAL_EVENT_LOOP_GROUP, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java
index 26488cf..ba4cb8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java
@@ -32,7 +32,7 @@ import java.net.InetSocketAddress;
@Category({RPCTests.class, MediumTests.class}) // Can't be small, we're playing with the EnvironmentEdge
public class TestHBaseClient {
- @Test
+ @Test (timeout=180000)
public void testFailedServer(){
ManualEnvironmentEdge ee = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge( ee );
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index cee459f..b97bff6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -110,7 +110,7 @@ public class TestProtoBufRpc {
server.stop();
}
- @Test
+ @Test (timeout=180000)
public void testProtoBufRpc() throws Exception {
RpcClient rpcClient = RpcClientFactory.createClient(conf, HConstants.CLUSTER_ID_DEFAULT);
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
index 298f086..30c98a0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
@@ -166,7 +166,7 @@ public class TestRpcHandlerException {
* caught errors exceeds the threshold. Client will hang when RS aborts.
*/
@Ignore
- @Test
+ @Test (timeout=60000)
public void testRpcScheduler() throws IOException, InterruptedException {
PriorityFunction qosFunction = mock(PriorityFunction.class);
Abortable abortable = new AbortServer();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
index 443ec78..a418f48 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
@@ -33,7 +33,7 @@ import static org.junit.Assert.*;
public class TestRpcMetrics {
public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
- @Test
+ @Test (timeout=60000)
public void testFactory() {
MetricsHBaseServer masterMetrics = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource masterSource = masterMetrics.getMetricsSource();
@@ -56,7 +56,7 @@ public class TestRpcMetrics {
* This test makes sure that the numbers from a MetricsHBaseServerWrapper are correctly exported
* to hadoop metrics 2 system.
*/
- @Test
+ @Test (timeout=60000)
public void testWrapperSource() {
MetricsHBaseServer mrpc = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource serverSource = mrpc.getMetricsSource();
@@ -71,7 +71,7 @@ public class TestRpcMetrics {
/**
* Test to make sure that all the actively called method on MetricsHBaseServer work.
*/
- @Test
+ @Test (timeout=60000)
public void testSourceMethods() {
MetricsHBaseServer mrpc = new MetricsHBaseServer("HMaster", new MetricsHBaseServerWrapperStub());
MetricsHBaseServerSource serverSource = mrpc.getMetricsSource();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 11ac43f..4c781f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -75,7 +75,7 @@ public class TestSimpleRpcScheduler {
conf = HBaseConfiguration.create();
}
- @Test
+ @Test (timeout=60000)
public void testBasic() throws IOException, InterruptedException {
PriorityFunction qosFunction = mock(PriorityFunction.class);
RpcScheduler scheduler = new SimpleRpcScheduler(
@@ -88,7 +88,7 @@ public class TestSimpleRpcScheduler {
scheduler.stop();
}
- @Test
+ @Test (timeout=60000)
public void testHandlerIsolation() throws IOException, InterruptedException {
CallRunner generalTask = createMockTask();
CallRunner priorityTask = createMockTask();
@@ -146,7 +146,7 @@ public class TestSimpleRpcScheduler {
return task;
}
- @Test
+ @Test (timeout=60000)
public void testRpcScheduler() throws Exception {
testRpcScheduler(SimpleRpcScheduler.CALL_QUEUE_TYPE_DEADLINE_CONF_VALUE);
testRpcScheduler(SimpleRpcScheduler.CALL_QUEUE_TYPE_FIFO_CONF_VALUE);
@@ -227,7 +227,7 @@ public class TestSimpleRpcScheduler {
}
}
- @Test
+ @Test (timeout=60000)
public void testScanQueues() throws Exception {
Configuration schedConf = HBaseConfiguration.create();
schedConf.setFloat(SimpleRpcScheduler.CALL_QUEUE_HANDLER_FACTOR_CONF_KEY, 1.0f);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
index ab6a86d..8b05f68 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
@@ -31,7 +31,7 @@ import static org.mockito.Mockito.verify;
@Category({MapReduceTests.class, SmallTests.class})
public class TestDriver {
- @Test
+ @Test (timeout=60000)
public void testDriverMainMethod() throws Throwable {
ProgramDriver programDriverMock = mock(ProgramDriver.class);
Driver.setProgramDriver(programDriverMock);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
index 90ed73b..28928c5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
@@ -52,7 +52,7 @@ import com.google.common.collect.ImmutableList;
@Category({MapReduceTests.class, SmallTests.class})
public class TestGroupingTableMap {
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldNotCallCollectonSinceFindUniqueKeyValueMoreThanOnes()
throws Exception {
@@ -83,7 +83,7 @@ public class TestGroupingTableMap {
}
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {
GroupingTableMap gTableMap = null;
@@ -115,7 +115,7 @@ public class TestGroupingTableMap {
}
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation" })
public void shouldCreateNewKey() throws Exception {
GroupingTableMap gTableMap = null;
@@ -164,7 +164,7 @@ public class TestGroupingTableMap {
}
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation" })
public void shouldReturnNullFromCreateGroupKey() throws Exception {
GroupingTableMap gTableMap = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
index 3fad1fe..93e8f5f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestIdentityTableMap.java
@@ -37,7 +37,7 @@ import org.mockito.Mockito;
@Category({MapReduceTests.class, SmallTests.class})
public class TestIdentityTableMap {
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldCollectPredefinedTimes() throws IOException {
int recordNumber = 999;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
index 6c7e445..6a62f19 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java
@@ -48,7 +48,7 @@ import com.google.common.base.Joiner;
@Category({MapReduceTests.class, SmallTests.class})
public class TestRowCounter {
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void shouldPrintUsage() throws Exception {
String expectedOutput = "rowcounter [...]";
@@ -62,7 +62,7 @@ public class TestRowCounter {
assertTrue(result.startsWith(expectedOutput));
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void shouldExitAndPrintUsageSinceParameterNumberLessThanThree()
throws Exception {
@@ -78,7 +78,7 @@ public class TestRowCounter {
assertTrue(result.startsWith(line));
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation", "unchecked" })
public void shouldRegInReportEveryIncomingRow() throws IOException {
int iterationNumber = 999;
@@ -92,7 +92,7 @@ public class TestRowCounter {
any(Enum.class), anyInt());
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings({ "deprecation" })
public void shouldCreateAndRunSubmittableJob() throws Exception {
RowCounter rCounter = new RowCounter();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
index 216041d..d695bea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
@Category({MapReduceTests.class, SmallTests.class})
public class TestSplitTable {
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void testSplitTableCompareTo() {
TableSplit aTableSplit = new TableSplit(Bytes.toBytes("tableA"),
@@ -61,7 +61,7 @@ public class TestSplitTable {
assertTrue(cTableSplit.compareTo(aTableSplit) > 0);
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void testSplitTableEquals() {
byte[] tableA = Bytes.toBytes("tableA");
@@ -92,7 +92,7 @@ public class TestSplitTable {
assertEquals(tablesplit, same);
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void testToString() {
TableSplit split =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index d7dd8ec..ba8ff80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -265,7 +265,7 @@ public class TestTableInputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReader() throws IOException {
Table table = createTable("table1".getBytes());
runTestMapred(table);
@@ -276,7 +276,7 @@ public class TestTableInputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReaderScannerFail() throws IOException {
Table htable = createIOEScannerTable("table2".getBytes(), 1);
runTestMapred(htable);
@@ -299,7 +299,7 @@ public class TestTableInputFormat {
*
* @throws org.apache.hadoop.hbase.DoNotRetryIOException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReaderScannerTimeout() throws IOException {
Table htable = createDNRIOEScannerTable("table4".getBytes(), 1);
runTestMapred(htable);
@@ -320,7 +320,7 @@ public class TestTableInputFormat {
/**
* Verify the example we present in javadocs on TableInputFormatBase
*/
- @Test
+ @Test (timeout=300000)
public void testExtensionOfTableInputFormatBase() throws IOException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase");
final Table table = createTable(Bytes.toBytes("exampleTable"),
@@ -328,7 +328,7 @@ public class TestTableInputFormat {
testInputFormat(ExampleTIF.class);
}
- @Test
+ @Test (timeout=300000)
public void testDeprecatedExtensionOfTableInputFormatBase() throws IOException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase, "
+ "as it was given in 0.98.");
@@ -337,7 +337,7 @@ public class TestTableInputFormat {
testInputFormat(ExampleDeprecatedTIF.class);
}
- @Test
+ @Test (timeout=300000)
public void testJobConfigurableExtensionOfTableInputFormatBase() throws IOException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase, "
+ "using JobConfigurable.");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index 628bb96..6e3fa61 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -135,7 +135,7 @@ public class TestTableMapReduceUtil {
* Check what the given number of reduce tasks for the given job configuration
* does not exceed the number of regions for the given table.
*/
- @Test
+ @Test (timeout=300000)
public void shouldNumberOfReduceTaskNotExceedNumberOfRegionsForGivenTable()
throws IOException {
Assert.assertNotNull(presidentsTable);
@@ -153,7 +153,7 @@ public class TestTableMapReduceUtil {
assertEquals(1, jobConf.getNumReduceTasks());
}
- @Test
+ @Test (timeout=300000)
public void shouldNumberOfMapTaskNotExceedNumberOfRegionsForGivenTable()
throws IOException {
Configuration cfg = UTIL.getConfiguration();
@@ -168,7 +168,7 @@ public class TestTableMapReduceUtil {
assertEquals(1, jobConf.getNumMapTasks());
}
- @Test
+ @Test (timeout=300000)
@SuppressWarnings("deprecation")
public void shoudBeValidMapReduceEvaluation() throws Exception {
Configuration cfg = UTIL.getConfiguration();
@@ -189,7 +189,7 @@ public class TestTableMapReduceUtil {
}
}
- @Test
+ @Test (timeout=300000)
@SuppressWarnings("deprecation")
public void shoudBeValidMapReduceWithPartitionerEvaluation()
throws IOException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
index eabedec..706daed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableSnapshotInputFormat.java
@@ -95,7 +95,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
}
}
- @Test
+ @Test (timeout=300000)
public void testInitTableSnapshotMapperJobConfig() throws Exception {
setupCluster();
TableName tableName = TableName.valueOf("testInitTableSnapshotMapperJobConfig");
@@ -129,19 +129,19 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
// TODO: mapred does not support limiting input range by startrow, endrow.
// Thus the following tests must override parameterverification.
- @Test
+ @Test (timeout=300000)
@Override
public void testWithMockedMapReduceMultiRegion() throws Exception {
testWithMockedMapReduce(UTIL, "testWithMockedMapReduceMultiRegion", 10, 10);
}
- @Test
+ @Test (timeout=300000)
@Override
public void testWithMapReduceMultiRegion() throws Exception {
testWithMapReduce(UTIL, "testWithMapReduceMultiRegion", 10, 10, false);
}
- @Test
+ @Test (timeout=300000)
@Override
// run the MR job while HBase is offline
public void testWithMapReduceAndOfflineHBaseMultiRegion() throws Exception {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 22bc330..7a6de95 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -321,7 +321,7 @@ public class TestCellCounter {
}
}
- @Test
+ @Test (timeout=300000)
public void TestCellCounterWithoutOutputDir() throws Exception {
String[] args = new String[] { "tableName" };
assertEquals("CellCounter should exit with -1 as output directory is not specified.", -1,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 4b11abb..2d683bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -119,7 +119,7 @@ public class TestCopyTable {
* Simple end-to-end test
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testCopyTable() throws Exception {
doCopyTableTest(false);
}
@@ -127,12 +127,12 @@ public class TestCopyTable {
/**
* Simple end-to-end test with bulkload.
*/
- @Test
+ @Test (timeout=300000)
public void testCopyTableWithBulkload() throws Exception {
doCopyTableTest(true);
}
- @Test
+ @Test (timeout=300000)
public void testStartStopRow() throws Exception {
final TableName TABLENAME1 = TableName.valueOf("testStartStopRow1");
final TableName TABLENAME2 = TableName.valueOf("testStartStopRow2");
@@ -186,7 +186,7 @@ public class TestCopyTable {
/**
* Test copy of table from sourceTable to targetTable all rows from family a
*/
- @Test
+ @Test (timeout=300000)
public void testRenameFamily() throws Exception {
String sourceTable = "sourceTable";
String targetTable = "targetTable";
@@ -229,7 +229,7 @@ public class TestCopyTable {
/**
* Test main method of CopyTable.
*/
- @Test
+ @Test (timeout=300000)
public void testMainMethod() throws Exception {
String[] emptyArgs = { "-h" };
PrintStream oldWriter = System.err;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
index fc7b102..1a30620 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
@@ -38,7 +38,7 @@ public class TestGroupingTableMapper {
/**
* Test GroupingTableMapper class
*/
- @Test
+ @Test (timeout=60000)
public void testGroupingTableMapper() throws Exception {
GroupingTableMapper mapper = new GroupingTableMapper();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index ecea98e..c8f5ba3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -179,7 +179,7 @@ public class TestHFileOutputFormat {
* passed a keyvalue whose timestamp is {@link HConstants#LATEST_TIMESTAMP}.
* @see HBASE-2615
*/
- @Test
+ @Test (timeout=300000)
public void test_LATEST_TIMESTAMP_isReplaced()
throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -229,7 +229,7 @@ public class TestHFileOutputFormat {
* Test that {@link HFileOutputFormat} creates an HFile with TIMERANGE
* metadata used by time-restricted scans.
*/
- @Test
+ @Test (timeout=300000)
public void test_TIMERANGE() throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
RecordWriter writer = null;
@@ -295,7 +295,7 @@ public class TestHFileOutputFormat {
/**
* Run small MR job.
*/
- @Test
+ @Test (timeout=300000)
public void testWritingPEData() throws Exception {
Configuration conf = util.getConfiguration();
Path testDir = util.getDataTestDirOnTestFS("testWritingPEData");
@@ -333,7 +333,7 @@ public class TestHFileOutputFormat {
assertTrue(files.length > 0);
}
- @Test
+ @Test (timeout=300000)
public void testJobConfiguration() throws Exception {
Job job = new Job(util.getConfiguration());
job.setWorkingDirectory(util.getDataTestDir("testJobConfiguration"));
@@ -366,13 +366,13 @@ public class TestHFileOutputFormat {
return ret;
}
- @Test
+ @Test (timeout=300000)
public void testMRIncrementalLoad() throws Exception {
LOG.info("\nStarting test testMRIncrementalLoad\n");
doIncrementalLoadTest(false);
}
- @Test
+ @Test (timeout=300000)
public void testMRIncrementalLoadWithSplit() throws Exception {
LOG.info("\nStarting test testMRIncrementalLoadWithSplit\n");
doIncrementalLoadTest(true);
@@ -501,7 +501,7 @@ public class TestHFileOutputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyCompressionMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -571,7 +571,7 @@ public class TestHFileOutputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException {
for (int numCfs = 0; numCfs <= 2; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -642,7 +642,7 @@ public class TestHFileOutputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -718,7 +718,7 @@ public class TestHFileOutputFormat {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -798,7 +798,7 @@ public class TestHFileOutputFormat {
* Test that {@link HFileOutputFormat} RecordWriter uses compression and
* bloom filter settings from the column family descriptor
*/
- @Test
+ @Test (timeout=300000)
public void testColumnFamilySettings() throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
RecordWriter writer = null;
@@ -900,7 +900,7 @@ public class TestHFileOutputFormat {
* Without the fix of HBASE-6901, an ArrayIndexOutOfBoundsException
* will be thrown.
*/
- @Ignore ("Flakey: See HBASE-9051") @Test
+ @Ignore ("Flakey: See HBASE-9051") @Test (timeout=300000)
public void testExcludeAllFromMinorCompaction() throws Exception {
Configuration conf = util.getConfiguration();
conf.setInt("hbase.hstore.compaction.min", 2);
@@ -967,7 +967,7 @@ public class TestHFileOutputFormat {
}
}
- @Test
+ @Test (timeout=300000)
public void testExcludeMinorCompaction() throws Exception {
Configuration conf = util.getConfiguration();
conf.setInt("hbase.hstore.compaction.min", 2);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 0f60f3b..e7c8f0d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -179,7 +179,7 @@ public class TestHFileOutputFormat2 {
* passed a keyvalue whose timestamp is {@link HConstants#LATEST_TIMESTAMP}.
* @see HBASE-2615
*/
- @Test
+ @Test (timeout=300000)
public void test_LATEST_TIMESTAMP_isReplaced()
throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -231,7 +231,7 @@ public class TestHFileOutputFormat2 {
* Test that {@link HFileOutputFormat2} creates an HFile with TIMERANGE
* metadata used by time-restricted scans.
*/
- @Test
+ @Test (timeout=300000)
public void test_TIMERANGE() throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
RecordWriter writer = null;
@@ -297,7 +297,7 @@ public class TestHFileOutputFormat2 {
/**
* Run small MR job.
*/
- @Test
+ @Test (timeout=300000)
public void testWritingPEData() throws Exception {
Configuration conf = util.getConfiguration();
Path testDir = util.getDataTestDirOnTestFS("testWritingPEData");
@@ -335,7 +335,7 @@ public class TestHFileOutputFormat2 {
assertTrue(files.length > 0);
}
- @Test
+ @Test (timeout=300000)
public void testJobConfiguration() throws Exception {
Job job = new Job(util.getConfiguration());
job.setWorkingDirectory(util.getDataTestDir("testJobConfiguration"));
@@ -368,13 +368,13 @@ public class TestHFileOutputFormat2 {
return ret;
}
- @Test
+ @Test (timeout=300000)
public void testMRIncrementalLoad() throws Exception {
LOG.info("\nStarting test testMRIncrementalLoad\n");
doIncrementalLoadTest(false);
}
- @Test
+ @Test (timeout=300000)
public void testMRIncrementalLoadWithSplit() throws Exception {
LOG.info("\nStarting test testMRIncrementalLoadWithSplit\n");
doIncrementalLoadTest(true);
@@ -501,7 +501,7 @@ public class TestHFileOutputFormat2 {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyCompressionMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -572,7 +572,7 @@ public class TestHFileOutputFormat2 {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyBloomTypeMap() throws IOException {
for (int numCfs = 0; numCfs <= 2; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -643,7 +643,7 @@ public class TestHFileOutputFormat2 {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyBlockSizeMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -718,7 +718,7 @@ public class TestHFileOutputFormat2 {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=300000)
public void testSerializeDeserializeFamilyDataBlockEncodingMap() throws IOException {
for (int numCfs = 0; numCfs <= 3; numCfs++) {
Configuration conf = new Configuration(this.util.getConfiguration());
@@ -799,7 +799,7 @@ public class TestHFileOutputFormat2 {
* Test that {@link HFileOutputFormat2} RecordWriter uses compression and
* bloom filter settings from the column family descriptor
*/
- @Test
+ @Test (timeout=300000)
public void testColumnFamilySettings() throws Exception {
Configuration conf = new Configuration(this.util.getConfiguration());
RecordWriter writer = null;
@@ -901,7 +901,7 @@ public class TestHFileOutputFormat2 {
* Without the fix of HBASE-6901, an ArrayIndexOutOfBoundsException
* will be thrown.
*/
- @Ignore ("Flakey: See HBASE-9051") @Test
+ @Ignore ("Flakey: See HBASE-9051") @Test (timeout=300000)
public void testExcludeAllFromMinorCompaction() throws Exception {
Configuration conf = util.getConfiguration();
conf.setInt("hbase.hstore.compaction.min", 2);
@@ -971,7 +971,7 @@ public class TestHFileOutputFormat2 {
}
}
- @Test
+ @Test (timeout=300000)
public void testExcludeMinorCompaction() throws Exception {
Configuration conf = util.getConfiguration();
conf.setInt("hbase.hstore.compaction.min", 2);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 935d462..e513ce0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -159,7 +159,7 @@ public class TestImportExport {
* Test simple replication case with column mapping
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testSimpleCase() throws Exception {
String EXPORT_TABLE = "exportSimpleCase";
Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);
@@ -205,7 +205,7 @@ public class TestImportExport {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMetaExport() throws Exception {
String EXPORT_TABLE = TableName.META_TABLE_NAME.getNameAsString();
String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1", "0", "0" };
@@ -216,7 +216,7 @@ public class TestImportExport {
* Test import data from 0.94 exported file
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testImport94Table() throws Exception {
URL url = TestImportExport.class.getResource(
"exportedTableIn94Format");
@@ -247,7 +247,7 @@ public class TestImportExport {
/**
* Test export scanner batching
*/
- @Test
+ @Test (timeout=180000)
public void testExportScannerBatching() throws Exception {
String BATCH_TABLE = "exportWithBatch";
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(BATCH_TABLE));
@@ -277,7 +277,7 @@ public class TestImportExport {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testWithDeletes() throws Exception {
String EXPORT_TABLE = "exportWithDeletes";
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
@@ -342,7 +342,7 @@ public class TestImportExport {
}
- @Test
+ @Test (timeout=180000)
public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
TableName EXPORT_TABLE =
TableName.valueOf("exportWithMultipleDeleteFamilyMarkersOfSameRowSameFamily");
@@ -423,7 +423,7 @@ public class TestImportExport {
* Create a simple table, run an Export Job on it, Import with filtering on, verify counts,
* attempt with invalid values.
*/
- @Test
+ @Test (timeout=180000)
public void testWithFilter() throws Exception {
// Create simple table to export
String EXPORT_TABLE = "exportSimpleCase_ImportWithFilter";
@@ -504,7 +504,7 @@ public class TestImportExport {
/**
* test main method. Import should print help and call System.exit
*/
- @Test
+ @Test (timeout=180000)
public void testImportMain() throws Exception {
PrintStream oldPrintStream = System.err;
SecurityManager SECURITY_MANAGER = System.getSecurityManager();
@@ -533,7 +533,7 @@ public class TestImportExport {
/**
* test main method. Export should print help and call System.exit
*/
- @Test
+ @Test (timeout=180000)
public void testExportMain() throws Exception {
PrintStream oldPrintStream = System.err;
SecurityManager SECURITY_MANAGER = System.getSecurityManager();
@@ -568,7 +568,7 @@ public class TestImportExport {
* Test map method of Importer
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
- @Test
+ @Test (timeout=180000)
public void testKeyValueImporter() throws Exception {
KeyValueImporter importer = new KeyValueImporter();
Configuration configuration = new Configuration();
@@ -603,7 +603,7 @@ public class TestImportExport {
* Test addFilterAndArguments method of Import This method set couple
* parameters into Configuration
*/
- @Test
+ @Test (timeout=180000)
public void testAddFilterAndArguments() throws IOException {
Configuration configuration = new Configuration();
@@ -617,7 +617,7 @@ public class TestImportExport {
assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
}
- @Test
+ @Test (timeout=180000)
public void testDurability() throws IOException, InterruptedException, ClassNotFoundException {
// Create an export table.
String exportTableName = "exporttestDurability";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index 8bd6771..c6854b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -111,7 +111,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testMROnTable() throws Exception {
String tableName = "test-" + UUID.randomUUID();
@@ -127,7 +127,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
util.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testMROnTableWithInvalidOperationAttr() throws Exception {
String tableName = "test-" + UUID.randomUUID();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index a5cceb0..c7b5dfd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -100,7 +100,7 @@ public class TestImportTSVWithTTLs implements Configurable {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testMROnTable() throws Exception {
String tableName = "test-" + UUID.randomUUID();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 6754ce9..2be4513 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -150,7 +150,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testMROnTable() throws Exception {
String tableName = "test-" + UUID.randomUUID();
@@ -166,7 +166,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testMROnTableWithDeletes() throws Exception {
TableName tableName = TableName.valueOf("test-" + UUID.randomUUID());
@@ -218,7 +218,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
assertTrue(verified);
}
- @Test
+ @Test (timeout=300000)
public void testMROnTableWithBulkload() throws Exception {
String tableName = "test-" + UUID.randomUUID();
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName), "hfiles");
@@ -234,7 +234,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
String table = "test-" + UUID.randomUUID();
String FAMILY = "FAM";
@@ -254,7 +254,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testMRWithOutputFormat() throws Exception {
String tableName = "test-" + UUID.randomUUID();
Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName), "hfiles");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 9e2e4be..d4e3cb7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -104,7 +104,7 @@ public class TestImportTsv implements Configurable {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testMROnTable() throws Exception {
String table = "test-" + UUID.randomUUID();
@@ -120,7 +120,7 @@ public class TestImportTsv implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testMROnTableWithTimestamp() throws Exception {
String table = "test-" + UUID.randomUUID();
@@ -139,7 +139,7 @@ public class TestImportTsv implements Configurable {
}
- @Test
+ @Test (timeout=300000)
public void testMROnTableWithCustomMapper()
throws Exception {
String table = "test-" + UUID.randomUUID();
@@ -155,7 +155,7 @@ public class TestImportTsv implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testBulkOutputWithoutAnExistingTable() throws Exception {
String table = "test-" + UUID.randomUUID();
@@ -172,7 +172,7 @@ public class TestImportTsv implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testBulkOutputWithAnExistingTable() throws Exception {
String table = "test-" + UUID.randomUUID();
@@ -190,7 +190,7 @@ public class TestImportTsv implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testBulkOutputWithAnExistingTableNoStrictTrue() throws Exception {
String table = "test-" + UUID.randomUUID();
// Prepare the arguments required for the test.
@@ -207,7 +207,7 @@ public class TestImportTsv implements Configurable {
util.deleteTable(table);
}
- @Test
+ @Test (timeout=300000)
public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
String table = "test-" + UUID.randomUUID();
Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table),"hfiles");
@@ -231,7 +231,7 @@ public class TestImportTsv implements Configurable {
assertTrue(job.getMapOutputValueClass().equals(Text.class));
}
- @Test
+ @Test (timeout=300000)
public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
String table = "test-" + UUID.randomUUID();
String FAMILY = "FAM";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
index 81e0a70..b2a727c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
@@ -63,7 +63,7 @@ public class TestImportTsvParser {
}
}
- @Test
+ @Test (timeout=60000)
public void testTsvParserSpecParsing() {
TsvParser parser;
@@ -129,7 +129,7 @@ public class TestImportTsvParser {
assertEquals(0, parser.getAttributesKeyColumnIndex());
}
- @Test
+ @Test (timeout=60000)
public void testTsvParser() throws BadTsvLineException {
TsvParser parser = new TsvParser("col_a,col_b:qual,HBASE_ROW_KEY,col_d", "\t");
assertBytesEquals(Bytes.toBytes("col_a"), parser.getFamily(0));
@@ -147,7 +147,7 @@ public class TestImportTsvParser {
checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
}
- @Test
+ @Test (timeout=60000)
public void testTsvParserWithTimestamp() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,HBASE_TS_KEY,col_a,", "\t");
assertNull(parser.getFamily(0));
@@ -214,7 +214,7 @@ public class TestImportTsvParser {
parser.parse(line, line.length);
}
- @Test
+ @Test (timeout=60000)
public void testTsvParserParseRowKey() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY", "\t");
assertEquals(0, parser.getRowKeyColumnIndex());
@@ -250,7 +250,7 @@ public class TestImportTsvParser {
assertEquals(6, rowKeyOffsets.getSecond().intValue());
}
- @Test
+ @Test (timeout=60000)
public void testTsvParseAttributesKey() throws BadTsvLineException {
TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY,HBASE_ATTRIBUTES_KEY", "\t");
assertEquals(0, parser.getRowKeyColumnIndex());
@@ -296,7 +296,7 @@ public class TestImportTsvParser {
}
}
- @Test
+ @Test (timeout=60000)
public void testTsvParserWithCellVisibilityCol() throws BadTsvLineException {
TsvParser parser = new TsvParser(
"HBASE_ROW_KEY,col_a,HBASE_TS_KEY,HBASE_ATTRIBUTES_KEY,HBASE_CELL_VISIBILITY", "\t");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
index 8187b73..c8db8e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java
@@ -44,7 +44,7 @@ import java.util.jar.Manifest;
@Category(SmallTests.class)
public class TestJarFinder {
- @Test
+ @Test (timeout=60000)
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
@@ -75,7 +75,7 @@ public class TestJarFinder {
}
}
- @Test
+ @Test (timeout=60000)
public void testExpandedClasspath() throws Exception {
//picking a class that is for sure in a directory in the classpath
//in this case the JAR is created on the fly
@@ -83,7 +83,7 @@ public class TestJarFinder {
Assert.assertTrue(new File(jar).exists());
}
- @Test
+ @Test (timeout=60000)
public void testExistingManifest() throws Exception {
File dir = new File(System.getProperty("test.build.dir", "target/test-dir"),
TestJarFinder.class.getName() + "-testExistingManifest");
@@ -111,7 +111,7 @@ public class TestJarFinder {
jis.close();
}
- @Test
+ @Test (timeout=60000)
public void testNoManifest() throws Exception {
File dir = new File(System.getProperty("test.build.dir", "target/test-dir"),
TestJarFinder.class.getName() + "-testNoManifest");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
index 3226cc6..8a43b57 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
@@ -175,25 +175,25 @@ public class TestMultiTableInputFormat {
}
}
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToEmpty() throws IOException, InterruptedException,
ClassNotFoundException {
testScan(null, null, null);
}
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToAPP() throws IOException, InterruptedException,
ClassNotFoundException {
testScan(null, "app", "apo");
}
- @Test
+ @Test (timeout=300000)
public void testScanOBBToOPP() throws IOException, InterruptedException,
ClassNotFoundException {
testScan("obb", "opp", "opo");
}
- @Test
+ @Test (timeout=300000)
public void testScanYZYToEmpty() throws IOException, InterruptedException,
ClassNotFoundException {
testScan("yzy", null, "zzz");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 6180632..fca752e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -127,7 +127,7 @@ public class TestMultithreadedTableMapper {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testMultithreadedTableMapper()
throws IOException, InterruptedException, ClassNotFoundException {
runTestOnTable(UTIL.getConnection().getTable(MULTI_REGION_TABLE_NAME));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 59854ee..cc297cb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -91,7 +91,7 @@ public class TestRowCounter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRowCounterNoColumn() throws Exception {
String[] args = new String[] {
TABLE_NAME
@@ -105,7 +105,7 @@ public class TestRowCounter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRowCounterExclusiveColumn() throws Exception {
String[] args = new String[] {
TABLE_NAME, COL_FAM + ":" + COL1
@@ -119,7 +119,7 @@ public class TestRowCounter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRowCounterColumnWithColonInQualifier() throws Exception {
String[] args = new String[] {
TABLE_NAME, COL_FAM + ":" + COMPOSITE_COLUMN
@@ -133,7 +133,7 @@ public class TestRowCounter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRowCounterHiddenColumn() throws Exception {
String[] args = new String[] {
TABLE_NAME, COL_FAM + ":" + COL2
@@ -146,7 +146,7 @@ public class TestRowCounter {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testRowCounterTimeRange() throws Exception {
final byte[] family = Bytes.toBytes(COL_FAM);
final byte[] col1 = Bytes.toBytes(COL1);
@@ -257,7 +257,7 @@ public class TestRowCounter {
/**
* test main method. Import should print help and call System.exit
*/
- @Test
+ @Test (timeout=180000)
public void testImportMain() throws Exception {
PrintStream oldPrintStream = System.err;
SecurityManager SECURITY_MANAGER = System.getSecurityManager();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
index 119df80..86384d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
@@ -38,7 +38,7 @@ public class TestSimpleTotalOrderPartitioner {
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Configuration conf = TEST_UTIL.getConfiguration();
- @Test
+ @Test (timeout=60000)
public void testSplit() throws Exception {
String start = "a";
String end = "{";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index 566a642..1d12487 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -270,7 +270,7 @@ public class TestTableInputFormat {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReaderMapreduce() throws IOException,
InterruptedException {
Table table = createTable("table1-mr".getBytes());
@@ -283,7 +283,7 @@ public class TestTableInputFormat {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReaderScannerFailMapreduce() throws IOException,
InterruptedException {
Table htable = createIOEScannerTable("table2-mr".getBytes(), 1);
@@ -310,7 +310,7 @@ public class TestTableInputFormat {
* @throws InterruptedException
* @throws org.apache.hadoop.hbase.DoNotRetryIOException
*/
- @Test
+ @Test (timeout=300000)
public void testTableRecordReaderScannerTimeoutMapreduce()
throws IOException, InterruptedException {
Table htable = createDNRIOEScannerTable("table4-mr".getBytes(), 1);
@@ -334,7 +334,7 @@ public class TestTableInputFormat {
/**
* Verify the example we present in javadocs on TableInputFormatBase
*/
- @Test
+ @Test (timeout=300000)
public void testExtensionOfTableInputFormatBase()
throws IOException, InterruptedException, ClassNotFoundException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase");
@@ -343,7 +343,7 @@ public class TestTableInputFormat {
testInputFormat(ExampleTIF.class);
}
- @Test
+ @Test (timeout=300000)
public void testJobConfigurableExtensionOfTableInputFormatBase()
throws IOException, InterruptedException, ClassNotFoundException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase, " +
@@ -353,7 +353,7 @@ public class TestTableInputFormat {
testInputFormat(ExampleJobConfigurableTIF.class);
}
- @Test
+ @Test (timeout=300000)
public void testDeprecatedExtensionOfTableInputFormatBase()
throws IOException, InterruptedException, ClassNotFoundException {
LOG.info("testing use of an InputFormat taht extends InputFormatBase, " +
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
index c757a2d..48f14a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({SmallTests.class})
public class TestTableInputFormatBase {
- @Test
+ @Test (timeout=60000)
public void testTableInputFormatBaseReverseDNSForIPv6()
throws UnknownHostException, NamingException {
String address = "ipv6.google.com";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
index 7d8a895..f0b8e08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
@@ -39,7 +39,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToEmpty()
throws IOException, InterruptedException, ClassNotFoundException {
testScan(null, null, null);
@@ -52,7 +52,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToAPP()
throws IOException, InterruptedException, ClassNotFoundException {
testScan(null, "app", "apo");
@@ -65,7 +65,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToBBA()
throws IOException, InterruptedException, ClassNotFoundException {
testScan(null, "bba", "baz");
@@ -78,7 +78,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToBBB()
throws IOException, InterruptedException, ClassNotFoundException {
testScan(null, "bbb", "bba");
@@ -91,7 +91,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanEmptyToOPP()
throws IOException, InterruptedException, ClassNotFoundException {
testScan(null, "opp", "opo");
@@ -109,7 +109,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testGetSplits() throws IOException, InterruptedException, ClassNotFoundException {
testNumOfSplits("-1", 52);
testNumOfSplits("100", 1);
@@ -122,7 +122,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testGetSplitsPoint() throws IOException, InterruptedException,
ClassNotFoundException {
// Test Case 1: "aaabcdef" and "aaaff", split point is "aaad".
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
index 02f893f..fcb43dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
@@ -39,7 +39,7 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanOBBToOPP()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("obb", "opp", "opo");
@@ -52,7 +52,7 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanOBBToQPP()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("obb", "qpp", "qpo");
@@ -65,7 +65,7 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanOPPToEmpty()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("opp", null, "zzz");
@@ -78,7 +78,7 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanYYXToEmpty()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("yyx", null, "zzz");
@@ -91,7 +91,7 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanYYYToEmpty()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("yyy", null, "zzz");
@@ -104,13 +104,13 @@ public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
* @throws ClassNotFoundException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=300000)
public void testScanYZYToEmpty()
throws IOException, InterruptedException, ClassNotFoundException {
testScan("yzy", null, "zzz");
}
- @Test
+ @Test (timeout=300000)
public void testScanFromConfiguration()
throws IOException, InterruptedException, ClassNotFoundException {
testScanFromConfiguration("bba", "bbd", "bbc");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
index 303a144..023a2a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java
@@ -40,7 +40,7 @@ public class TestTableMapReduceUtil {
* the method depends on an online cluster.
*/
- @Test
+ @Test (timeout=60000)
public void testInitTableMapperJob1() throws Exception {
Configuration configuration = new Configuration();
Job job = new Job(configuration, "tableName");
@@ -55,7 +55,7 @@ public class TestTableMapReduceUtil {
assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE));
}
- @Test
+ @Test (timeout=60000)
public void testInitTableMapperJob2() throws Exception {
Configuration configuration = new Configuration();
Job job = new Job(configuration, "tableName");
@@ -69,7 +69,7 @@ public class TestTableMapReduceUtil {
assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE));
}
- @Test
+ @Test (timeout=60000)
public void testInitTableMapperJob3() throws Exception {
Configuration configuration = new Configuration();
Job job = new Job(configuration, "tableName");
@@ -83,7 +83,7 @@ public class TestTableMapReduceUtil {
assertEquals("Table", job.getConfiguration().get(TableInputFormat.INPUT_TABLE));
}
- @Test
+ @Test (timeout=60000)
public void testInitTableMapperJob4() throws Exception {
Configuration configuration = new Configuration();
Job job = new Job(configuration, "tableName");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
index 8d7e2d3..9bdbacd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java
@@ -71,7 +71,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
public void tearDown() throws Exception {
}
- @Test
+ @Test (timeout=300000)
public void testGetBestLocations() throws IOException {
TableSnapshotInputFormatImpl tsif = new TableSnapshotInputFormatImpl();
Configuration conf = UTIL.getConfiguration();
@@ -147,7 +147,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
}
}
- @Test
+ @Test (timeout=300000)
public void testInitTableSnapshotMapperJobConfig() throws Exception {
setupCluster();
TableName tableName = TableName.valueOf("testInitTableSnapshotMapperJobConfig");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
index 59f787f..1b45125 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
@@ -33,7 +33,7 @@ import static org.junit.Assert.assertTrue;
@Category({MapReduceTests.class, SmallTests.class})
public class TestTableSplit {
- @Test
+ @Test (timeout=60000)
public void testHashCode() {
TableSplit split1 = new TableSplit(TableName.valueOf("table"),
"row-start".getBytes(),
@@ -52,7 +52,7 @@ public class TestTableSplit {
/**
* length of region should not influence hashcode
* */
- @Test
+ @Test (timeout=60000)
public void testHashCode_length() {
TableSplit split1 = new TableSplit(TableName.valueOf("table"),
"row-start".getBytes(),
@@ -72,7 +72,7 @@ public class TestTableSplit {
/**
* Length of region need to be properly serialized.
* */
- @Test
+ @Test (timeout=60000)
public void testLengthIsSerialized() throws Exception {
TableSplit split1 = new TableSplit(TableName.valueOf("table"),
"row-start".getBytes(),
@@ -86,7 +86,7 @@ public class TestTableSplit {
Assert.assertEquals(666, deserialized.getLength());
}
- @Test
+ @Test (timeout=60000)
public void testToString() {
TableSplit split =
new TableSplit(TableName.valueOf("table"), "row-start".getBytes(), "row-end".getBytes(),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 03da1ed..31617b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -144,7 +144,7 @@ public class TestTimeRangeMapRed {
}
}
- @Test
+ @Test (timeout=300000)
public void testTimeRangeMapRed()
throws IOException, InterruptedException, ClassNotFoundException {
final HTableDescriptor desc = new HTableDescriptor(TABLE_NAME);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 68cf8ba..f235641 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -85,7 +85,7 @@ public class TestWALPlayer {
* Simple end-to-end test
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testWALPlayer() throws Exception {
final TableName TABLENAME1 = TableName.valueOf("testWALPlayer1");
final TableName TABLENAME2 = TableName.valueOf("testWALPlayer2");
@@ -132,12 +132,12 @@ public class TestWALPlayer {
/**
* Test WALKeyValueMapper setup and map
*/
- @Test
+ @Test (timeout=300000)
public void testWALKeyValueMapper() throws Exception {
testWALKeyValueMapper(WALPlayer.TABLES_KEY);
}
- @Test
+ @Test (timeout=300000)
public void testWALKeyValueMapperWithDeprecatedConfig() throws Exception {
testWALKeyValueMapper("hlog.input.tables");
}
@@ -181,7 +181,7 @@ public class TestWALPlayer {
/**
* Test main method
*/
- @Test
+ @Test (timeout=300000)
public void testMainMethod() throws Exception {
PrintStream oldPrintStream = System.err;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
index d9fe0d0..41c4c65 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
@@ -116,7 +116,7 @@ public class TestWALRecordReader {
* Test partial reads from the log based on passed time range
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testPartialRead() throws Exception {
final WALFactory walfactory = new WALFactory(conf, null, getName());
WAL log = walfactory.getWAL(info.getEncodedNameAsBytes());
@@ -179,7 +179,7 @@ public class TestWALRecordReader {
* Test basic functionality
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testWALRecordReader() throws Exception {
final WALFactory walfactory = new WALFactory(conf, null, getName());
WAL log = walfactory.getWAL(info.getEncodedNameAsBytes());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
index e3283e9..52e4209 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
@@ -109,7 +109,7 @@ public class TestActiveMasterManager {
* but rather acts directly on ZK.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testActiveMasterManagerFromZK() throws Exception {
ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
"testActiveMasterManagerFromZK", null, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 8ed49ff..d7676a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -482,7 +482,7 @@ public class TestCatalogJanitor {
}
}
- @Test
+ @Test (timeout=60000)
public void testCleanParent() throws IOException, InterruptedException {
HBaseTestingUtility htu = new HBaseTestingUtility();
setRootDirAndCleanIt(htu, "testCleanParent");
@@ -532,7 +532,7 @@ public class TestCatalogJanitor {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=60000)
public void testParentCleanedEvenIfDaughterGoneFirst()
throws IOException, InterruptedException {
parentWithSpecifiedEndKeyCleanedEvenIfDaughterGoneFirst(
@@ -544,7 +544,7 @@ public class TestCatalogJanitor {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=60000)
public void testLastParentCleanedEvenIfDaughterGoneFirst()
throws IOException, InterruptedException {
parentWithSpecifiedEndKeyCleanedEvenIfDaughterGoneFirst(
@@ -662,7 +662,7 @@ public class TestCatalogJanitor {
* parents are still referencing them. This ensures that grandfather regions
* do not point to deleted parent regions.
*/
- @Test
+ @Test (timeout=60000)
public void testScanDoesNotCleanRegionsWithExistingParents() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
setRootDirAndCleanIt(htu, "testScanDoesNotCleanRegionsWithExistingParents");
@@ -729,7 +729,7 @@ public class TestCatalogJanitor {
* Test that we correctly archive all the storefiles when a region is deleted
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testSplitParentFirstComparator() {
SplitParentFirstComparator comp = new SplitParentFirstComparator();
final HTableDescriptor htd = createHTableDescriptor();
@@ -819,7 +819,7 @@ public class TestCatalogJanitor {
}
- @Test
+ @Test (timeout=60000)
public void testArchiveOldRegion() throws Exception {
String table = "table";
HBaseTestingUtility htu = new HBaseTestingUtility();
@@ -903,7 +903,7 @@ public class TestCatalogJanitor {
* Test that if a store file with the same name is present as those already backed up cause the
* already archived files to be timestamped backup
*/
- @Test
+ @Test (timeout=60000)
public void testDuplicateHFileResolution() throws Exception {
String table = "table";
HBaseTestingUtility htu = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
index dd733ad..28e4fd1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
@@ -44,7 +44,7 @@ public class TestClockSkewDetection {
private static final Log LOG =
LogFactory.getLog(TestClockSkewDetection.class);
- @Test
+ @Test (timeout=60000)
public void testClockSkewDetection() throws Exception {
final Configuration conf = HBaseConfiguration.create();
ServerManager sm = new ServerManager(new Server() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
index 5d47ede..0ab5dc7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
@@ -44,7 +44,7 @@ public class TestClusterStatusPublisher {
EnvironmentEdgeManager.injectEdge(mee);
}
- @Test
+ @Test (timeout=180000)
public void testEmpty() {
ClusterStatusPublisher csp = new ClusterStatusPublisher() {
@Override
@@ -56,7 +56,7 @@ public class TestClusterStatusPublisher {
Assert.assertTrue(csp.generateDeadServersListToSend().isEmpty());
}
- @Test
+ @Test (timeout=180000)
public void testMaxSend() {
ClusterStatusPublisher csp = new ClusterStatusPublisher() {
@Override
@@ -82,7 +82,7 @@ public class TestClusterStatusPublisher {
Assert.assertTrue(csp.generateDeadServersListToSend().isEmpty());
}
- @Test
+ @Test (timeout=180000)
public void testOrder() {
ClusterStatusPublisher csp = new ClusterStatusPublisher() {
@Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
index 40d26f4..906d5b6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
@@ -76,7 +76,7 @@ public class TestDeadServer {
}
- @Test
+ @Test (timeout=180000)
public void testSortExtract(){
ManualEnvironmentEdge mee = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(mee);
@@ -103,7 +103,7 @@ public class TestDeadServer {
EnvironmentEdgeManager.reset();
}
- @Test
+ @Test (timeout=180000)
public void testClean(){
DeadServer d = new DeadServer();
d.add(hostname123);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetInfoPort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetInfoPort.java
index 418bddc..3bb3a09 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetInfoPort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetInfoPort.java
@@ -47,7 +47,7 @@ public class TestGetInfoPort {
testUtil.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void test() {
assertTrue(testUtil.getMiniHBaseCluster().getRegionServer(0).getMasterAddressTracker()
.getMasterInfoPort() > 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
index 0f7c281..b9a5f18 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
@@ -67,7 +67,7 @@ public class TestGetLastFlushedSequenceId {
testUtil.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void test() throws IOException, InterruptedException {
testUtil.getHBaseAdmin().createNamespace(
NamespaceDescriptor.create(tableName.getNamespaceAsString()).build());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterCommandLine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterCommandLine.java
index 2cb42f7..03febe7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterCommandLine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterCommandLine.java
@@ -29,7 +29,7 @@ import org.junit.experimental.categories.Category;
@Category({MasterTests.class, SmallTests.class})
public class TestHMasterCommandLine {
private static final HBaseTestingUtility TESTING_UTIL = new HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testRun() throws Exception {
HMasterCommandLine masterCommandLine = new HMasterCommandLine(HMaster.class);
masterCommandLine.setConf(TESTING_UTIL.getConfiguration());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
index 37d6940..411f62a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
@@ -87,7 +87,7 @@ public class TestHMasterRPCException {
testUtil.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testRPCException() throws IOException, InterruptedException, KeeperException {
ServerName sm = master.getServerName();
boolean fakeZNodeDelete = false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
index 8028756..9046a8d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
@@ -77,7 +77,7 @@ public class TestMaster {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
@SuppressWarnings("deprecation")
public void testMasterOpsWhileSplitting() throws Exception {
MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
@@ -123,7 +123,7 @@ public class TestMaster {
assertEquals(tableRegionFromName.getFirst(), pair.getFirst());
}
- @Test
+ @Test (timeout=180000)
public void testMoveRegionWhenNotInitialized() {
MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
HMaster m = cluster.getMaster();
@@ -139,7 +139,7 @@ public class TestMaster {
}
}
- @Test
+ @Test (timeout=180000)
public void testMoveThrowsUnknownRegionException() throws IOException {
TableName tableName =
TableName.valueOf("testMoveThrowsUnknownRegionException");
@@ -160,7 +160,7 @@ public class TestMaster {
}
}
- @Test
+ @Test (timeout=180000)
public void testMoveThrowsPleaseHoldException() throws IOException {
TableName tableName = TableName.valueOf("testMoveThrowsPleaseHoldException");
HMaster master = TEST_UTIL.getMiniHBaseCluster().getMaster();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
index 0534643..efcd364 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
@@ -64,7 +64,7 @@ public class TestMasterFileSystem {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testFsUriSetProperly() throws Exception {
HMaster master = UTIL.getMiniHBaseCluster().getMaster();
MasterFileSystem fs = master.getMasterFileSystem();
@@ -77,7 +77,7 @@ public class TestMasterFileSystem {
assertEquals(masterRoot, rootDir);
}
- @Test
+ @Test (timeout=180000)
public void testRemoveStaleRecoveringRegionsDuringMasterInitialization() throws Exception {
// this test is for when distributed log replay is enabled
if (!UTIL.getConfiguration().getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false)) return;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
index 8a55ce3..df1c907 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
@@ -114,7 +114,7 @@ public class TestMasterMetrics {
master.stopMaster();
}
- @Test
+ @Test (timeout=180000)
public void testDefaultMasterMetrics() throws Exception {
MetricsMasterSource masterSource = master.getMasterMetrics().getMetricsSource();
metricsHelper.assertGauge( "numRegionServers", 2, masterSource);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index 972834a..6c7505d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -250,7 +250,7 @@ public class TestMasterNoCluster {
}
}
- @Test
+ @Test (timeout=180000)
public void testNotPullingDeadRegionServerFromZK()
throws IOException, KeeperException, InterruptedException {
final Configuration conf = TESTUTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index ca9bc9c..1ba6e50 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -86,7 +86,7 @@ public class TestMasterOperationsForRegionReplicas {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithSingleReplica() throws Exception {
final int numRegions = 3;
final int numReplica = 1;
@@ -107,7 +107,7 @@ public class TestMasterOperationsForRegionReplicas {
}
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithMultipleReplicas() throws Exception {
final TableName table = TableName.valueOf("fooTable");
final int numRegions = 3;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
index 20e0e54..f74cff3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
@@ -47,7 +47,7 @@ public class TestMasterRestartAfterDisablingTable {
private static final Log LOG = LogFactory.getLog(TestMasterRestartAfterDisablingTable.class);
- @Test
+ @Test (timeout=300000)
public void testForCheckingIfEnableAndDisableWorksFineAfterSwitch()
throws Exception {
final int NUM_MASTERS = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
index b23ca78..461092d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
@@ -121,12 +121,12 @@ public class TestMasterStatusServlet {
Mockito.doReturn(tables).when(admin).listTables();
}
- @Test
+ @Test (timeout=180000)
public void testStatusTemplateNoTables() throws IOException {
new MasterStatusTmpl().render(new StringWriter(), master);
}
- @Test
+ @Test (timeout=180000)
public void testStatusTemplateMetaAvailable() throws IOException {
setupMockTables();
@@ -135,7 +135,7 @@ public class TestMasterStatusServlet {
.render(new StringWriter(), master);
}
- @Test
+ @Test (timeout=180000)
public void testStatusTemplateWithServers() throws IOException {
setupMockTables();
@@ -155,7 +155,7 @@ public class TestMasterStatusServlet {
.render(new StringWriter(), master);
}
- @Test
+ @Test (timeout=180000)
public void testAssignmentManagerTruncatedList() throws IOException {
AssignmentManager am = Mockito.mock(AssignmentManager.class);
RegionStates rs = Mockito.mock(RegionStates.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index 25dd13e..a8f8822 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -102,7 +102,7 @@ public class TestRegionPlacement {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testRegionPlacement() throws Exception {
String tableStr = "testRegionAssignment";
TableName table = TableName.valueOf(tableStr);
@@ -247,7 +247,7 @@ public class TestRegionPlacement {
/**
* Used to test the correctness of this class.
*/
- @Test
+ @Test (timeout=180000)
public void testRandomizedMatrix() {
int rows = 100;
int cols = 100;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
index 3f34bc4..e8537d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
@@ -68,7 +68,7 @@ public class TestRegionPlacement2 {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testFavoredNodesPresentForRoundRobinAssignment() throws HBaseIOException {
LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(TEST_UTIL.getConfiguration());
balancer.setMasterServices(TEST_UTIL.getMiniHBaseCluster().getMaster());
@@ -128,7 +128,7 @@ public class TestRegionPlacement2 {
!favoredNodesNow.contains(favoredNodesAfter.get(TERTIARY)));
}
- @Test
+ @Test (timeout=180000)
public void testFavoredNodesPresentForRandomAssignment() throws HBaseIOException {
LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(TEST_UTIL.getConfiguration());
balancer.setMasterServices(TEST_UTIL.getMiniHBaseCluster().getMaster());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlan.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlan.java
index 388924b..d255a4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlan.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlan.java
@@ -30,7 +30,7 @@ import org.junit.experimental.categories.Category;
@Category({MasterTests.class, SmallTests.class})
public class TestRegionPlan {
- @Test
+ @Test (timeout=60000)
public void test() {
HRegionInfo hri = new HRegionInfo(TableName.valueOf("table"));
ServerName source = ServerName.valueOf("source", 1234, 2345);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionState.java
index d9845e1..e586827 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionState.java
@@ -29,7 +29,7 @@ import static org.junit.Assert.assertEquals;
@Category({MasterTests.class, SmallTests.class})
public class TestRegionState {
- @Test
+ @Test (timeout=60000)
public void test() {
RegionState state1 = new RegionState(
new HRegionInfo(TableName.valueOf("table")), RegionState.State.OPENING);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionStates.java
index 99e1709..7ef2880 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionStates.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionStates.java
@@ -99,7 +99,7 @@ public class TestRegionStates {
latch.await();
}
- @Test
+ @Test (timeout=60000)
public void testWeDontReturnDrainingServersForOurBalancePlans() throws Exception {
MasterServices server = mock(MasterServices.class);
when(server.getServerName()).thenReturn(ServerName.valueOf("master,1,1"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java
index 4dc7d32..c2c78b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeAssignmentHelper.java
@@ -98,7 +98,7 @@ public class TestFavoredNodeAssignmentHelper {
return chosenServers;
}
- @Test
+ @Test (timeout=60000)
public void testSmallCluster() {
// Test the case where we cannot assign favored nodes (because the number
// of nodes in the cluster is too less)
@@ -110,7 +110,7 @@ public class TestFavoredNodeAssignmentHelper {
assertFalse(helper.canPlaceFavoredNodes());
}
- @Test
+ @Test (timeout=60000)
public void testPlacePrimaryRSAsRoundRobin() {
// Test the regular case where there are many servers in different racks
// Test once for few regions and once for many regions
@@ -119,7 +119,7 @@ public class TestFavoredNodeAssignmentHelper {
primaryRSPlacement(600, null, 10, 10, 10);
}
- @Test
+ @Test (timeout=60000)
public void testRoundRobinAssignmentsWithUnevenSizedRacks() {
//In the case of uneven racks, the regions should be distributed
//proportionately to the rack sizes
@@ -136,7 +136,7 @@ public class TestFavoredNodeAssignmentHelper {
primaryRSPlacement(459, null, 7, 9, 8);
}
- @Test
+ @Test (timeout=60000)
public void testSecondaryAndTertiaryPlacementWithSingleRack() {
// Test the case where there is a single rack and we need to choose
// Primary/Secondary/Tertiary from a single rack.
@@ -160,7 +160,7 @@ public class TestFavoredNodeAssignmentHelper {
}
}
- @Test
+ @Test (timeout=60000)
public void testSecondaryAndTertiaryPlacementWithSingleServer() {
// Test the case where we have a single node in the cluster. In this case
// the primary can be assigned but the secondary/tertiary would be null
@@ -178,7 +178,7 @@ public class TestFavoredNodeAssignmentHelper {
assertTrue(secondaryAndTertiaryMap.get(regions.get(0)) == null);
}
- @Test
+ @Test (timeout=60000)
public void testSecondaryAndTertiaryPlacementWithMultipleRacks() {
// Test the case where we have multiple racks and the region servers
// belong to multiple racks
@@ -207,7 +207,7 @@ public class TestFavoredNodeAssignmentHelper {
}
}
- @Test
+ @Test (timeout=60000)
public void testSecondaryAndTertiaryPlacementWithLessThanTwoServersInRacks() {
// Test the case where we have two racks but with less than two servers in each
// We will not have enough machines to select secondary/tertiary
@@ -228,7 +228,7 @@ public class TestFavoredNodeAssignmentHelper {
}
}
- @Test
+ @Test (timeout=60000)
public void testSecondaryAndTertiaryPlacementWithMoreThanOneServerInPrimaryRack() {
// Test the case where there is only one server in one rack and another rack
// has more servers. We try to choose secondary/tertiary on different
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestServerAndLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestServerAndLoad.java
index 2cfaf4e..88556fa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestServerAndLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestServerAndLoad.java
@@ -29,7 +29,7 @@ import org.junit.experimental.categories.Category;
@Category({MasterTests.class, SmallTests.class})
public class TestServerAndLoad {
- @Test
+ @Test (timeout=60000)
public void test() {
ServerName server = ServerName.valueOf("host", 12345, 112244);
int startcode = 12;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
index 000e331..16b24be 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
@@ -146,7 +146,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
};
- @Test
+ @Test (timeout=180000)
public void testKeepRegionLoad() throws Exception {
ServerName sn = ServerName.valueOf("test:8080", 100);
@@ -188,7 +188,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testBalanceCluster() throws Exception {
for (int[] mockCluster : clusterStateMocks) {
@@ -209,7 +209,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
}
- @Test
+ @Test (timeout=180000)
public void testSkewCost() {
Configuration conf = HBaseConfiguration.create();
StochasticLoadBalancer.CostFunction
@@ -235,7 +235,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
assertEquals(1, costFunction.cost(), 0.01);
}
- @Test
+ @Test (timeout=180000)
public void testTableSkewCost() {
Configuration conf = HBaseConfiguration.create();
StochasticLoadBalancer.CostFunction
@@ -249,7 +249,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
}
}
- @Test
+ @Test (timeout=180000)
public void testCostFromArray() {
Configuration conf = HBaseConfiguration.create();
StochasticLoadBalancer.CostFromRegionLoadFunction
@@ -308,7 +308,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
assertNull(plans);
}
- @Test
+ @Test (timeout=180000)
public void testReplicaCost() {
Configuration conf = HBaseConfiguration.create();
StochasticLoadBalancer.CostFunction
@@ -322,7 +322,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
}
}
- @Test
+ @Test (timeout=180000)
public void testReplicaCostForReplicas() {
Configuration conf = HBaseConfiguration.create();
StochasticLoadBalancer.CostFunction
@@ -397,7 +397,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
assertTrue(costWith2ReplicasOnTwoServers < costWith3ReplicasSameServer);
}
- @Test
+ @Test (timeout=180000)
public void testNeedsBalanceForColocatedReplicas() {
// check for the case where there are two hosts and with one rack, and where
// both the replicas are hosted on the same server
@@ -494,7 +494,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
// TODO(eclark): Make sure that the tables are well distributed.
}
- @Test
+ @Test (timeout=180000)
public void testLargeCluster() {
int numNodes = 1000;
int numRegions = 100000; //100 regions per RS
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
index 92c7bb6..4def26a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -54,7 +54,7 @@ public class TestCleanerChore {
}
- @Test
+ @Test (timeout=60000)
public void testSavesFilesOnRequest() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
@@ -81,7 +81,7 @@ public class TestCleanerChore {
assertTrue("Empty directory didn't get deleted", fs.exists(parent));
}
- @Test
+ @Test (timeout=60000)
public void testDeletesEmptyDirectories() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
@@ -122,7 +122,7 @@ public class TestCleanerChore {
* directory.
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=60000)
public void testDoesNotCheckDirectories() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
@@ -153,7 +153,7 @@ public class TestCleanerChore {
Mockito.reset(spy);
}
- @Test
+ @Test (timeout=60000)
public void testStoppedCleanerDoesNotDeleteFiles() throws Exception {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
@@ -184,7 +184,7 @@ public class TestCleanerChore {
* another file added, in which case the directory shouldn't be deleted.
* @throws IOException on failure
*/
- @Test
+ @Test (timeout=60000)
public void testCleanerDoesNotDeleteDirectoryWithLateAddedFiles() throws IOException {
Stoppable stop = new StoppableImplementation();
Configuration conf = UTIL.getConfiguration();
@@ -238,7 +238,7 @@ public class TestCleanerChore {
* This was from HBASE-7465.
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=60000)
public void testNoExceptionFromDirectoryWithRacyChildren() throws Exception {
Stoppable stop = new StoppableImplementation();
// need to use a localutil to not break the rest of the test that runs on the local FS, which
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
index 078aaa6..10a45c5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
@@ -64,7 +64,7 @@ public class TestHFileCleaner {
UTIL.shutdownMiniDFSCluster();
}
- @Test
+ @Test (timeout=180000)
public void testTTLCleaner() throws IOException, InterruptedException {
FileSystem fs = UTIL.getDFSCluster().getFileSystem();
Path root = UTIL.getDataTestDirOnTestFS();
@@ -162,7 +162,7 @@ public class TestHFileCleaner {
EnvironmentEdgeManager.injectEdge(originalEdge);
}
- @Test
+ @Test (timeout=180000)
public void testRemovesEmptyDirectories() throws Exception {
Configuration conf = UTIL.getConfiguration();
// no cleaner policies = delete all files
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
index 66874e6..6dd6cd5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
@@ -54,7 +54,7 @@ public class TestHFileLinkCleaner {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- @Test
+ @Test (timeout=60000)
public void testHFileLinkCleaning() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index 768b015..a350b6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
@@ -67,7 +67,7 @@ public class TestLogsCleaner {
TEST_UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testLogCleaning() throws Exception{
Configuration conf = TEST_UTIL.getConfiguration();
// set TTL
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
index 5b2f4f6..38c9523 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
@@ -99,7 +99,7 @@ public class TestTableDeleteFamilyHandler {
TEST_UTIL.ensureSomeRegionServersAvailable(2);
}
- @Test
+ @Test (timeout=300000)
public void deleteColumnFamilyWithMultipleRegions() throws Exception {
Admin admin = TEST_UTIL.getHBaseAdmin();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java
index 0d51875..7df7304 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java
@@ -80,7 +80,7 @@ public class TestTableDescriptorModification {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testModifyTable() throws IOException {
Admin admin = TEST_UTIL.getHBaseAdmin();
// Create a table with one family
@@ -103,7 +103,7 @@ public class TestTableDescriptorModification {
}
}
- @Test
+ @Test (timeout=300000)
public void testAddColumn() throws IOException {
Admin admin = TEST_UTIL.getHBaseAdmin();
// Create a table with two families
@@ -123,7 +123,7 @@ public class TestTableDescriptorModification {
}
}
- @Test
+ @Test (timeout=300000)
public void testDeleteColumn() throws IOException {
Admin admin = TEST_UTIL.getHBaseAdmin();
// Create a table with two families
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
index 1da38b8..3c9c4ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
@@ -95,7 +95,7 @@ public class TestSnapshotFileCache {
createAndTestSnapshotV2(cache, "snapshot2b", true, true);
}
- @Test
+ @Test (timeout=180000)
public void testJustFindLogsDirectory() throws Exception {
// don't refresh the cache unless we tell it to
long period = Long.MAX_VALUE;
@@ -130,7 +130,7 @@ public class TestSnapshotFileCache {
assertTrue("Cache didn't find:" + log, !Iterables.contains(notSnapshot, log));
}
- @Test
+ @Test (timeout=180000)
public void testReloadModifiedDirectory() throws IOException {
// don't refresh the cache unless we tell it to
long period = Long.MAX_VALUE;
@@ -146,7 +146,7 @@ public class TestSnapshotFileCache {
createAndTestSnapshotV2(cache, "snapshot2", false, false);
}
- @Test
+ @Test (timeout=180000)
public void testSnapshotTempDirReload() throws IOException {
long period = Long.MAX_VALUE;
// This doesn't refresh cache until we invoke it explicitly
@@ -164,7 +164,7 @@ public class TestSnapshotFileCache {
createAndTestSnapshotV2(cache, "snapshot2", true, false);
}
- @Test
+ @Test (timeout=180000)
public void testWeNeverCacheTmpDirAndLoadIt() throws Exception {
final AtomicInteger count = new AtomicInteger(0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
index 5e5b004..38d98d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
@@ -54,7 +54,7 @@ public class TestSnapshotHFileCleaner {
fs.delete(rootDir, true);
}
- @Test
+ @Test (timeout=60000)
public void testFindsSnapshotFilesWhenCleaning() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java
index 9a7d469..86f51d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java
@@ -52,7 +52,7 @@ public class TestSnapshotLogCleaner {
fs.delete(rootDir, true);
}
- @Test
+ @Test (timeout=60000)
public void testFindsSnapshotFilesWhenCleaning() throws IOException {
Configuration conf = TEST_UTIL.getConfiguration();
FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
index 7dd6377..a1c5a4b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
@@ -79,7 +79,7 @@ public class TestSnapshotManager {
return new SnapshotManager(services, metrics, coordinator, pool);
}
- @Test
+ @Test (timeout=60000)
public void testInProcess() throws KeeperException, IOException {
TableName tableName = TableName.valueOf("testTable");
SnapshotManager manager = getNewManager();
@@ -98,7 +98,7 @@ public class TestSnapshotManager {
/**
* Verify the snapshot support based on the configuration.
*/
- @Test
+ @Test (timeout=60000)
public void testSnapshotSupportConfiguration() throws Exception {
// No configuration (no cleaners, not enabled): snapshot feature disabled
Configuration conf = new Configuration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java
index f64b297..6bd212c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java
@@ -40,7 +40,7 @@ public class TestMemoryBoundedLogMessageBuffer {
private static final long TEN_KB = 10 * 1024;
private static final String JP_TEXT = "こんにちは";
- @Test
+ @Test (timeout=60000)
public void testBuffer() {
MemoryBoundedLogMessageBuffer buf =
new MemoryBoundedLogMessageBuffer(TEN_KB);
@@ -62,7 +62,7 @@ public class TestMemoryBoundedLogMessageBuffer {
dump.contains("hello 999" + eol));
}
- @Test
+ @Test (timeout=60000)
public void testNonAsciiEncoding() {
MemoryBoundedLogMessageBuffer buf =
new MemoryBoundedLogMessageBuffer(TEN_KB);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
index e54d0f6..fa83107 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
@@ -30,7 +30,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestTaskMonitor {
- @Test
+ @Test (timeout=60000)
public void testTaskMonitorBasics() {
TaskMonitor tm = new TaskMonitor();
assertTrue("Task monitor should start empty",
@@ -57,7 +57,7 @@ public class TestTaskMonitor {
assertEquals(0, tm.getTasks().size());
}
- @Test
+ @Test (timeout=60000)
public void testTasksGetAbortedOnLeak() throws InterruptedException {
final TaskMonitor tm = new TaskMonitor();
assertTrue("Task monitor should start empty",
@@ -88,7 +88,7 @@ public class TestTaskMonitor {
assertEquals(MonitoredTask.State.ABORTED, taskFromTm.getState());
}
- @Test
+ @Test (timeout=60000)
public void testTaskLimit() throws Exception {
TaskMonitor tm = new TaskMonitor();
for (int i = 0; i < TaskMonitor.MAX_TASKS + 10; i++) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
index f919078..f37d295 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
@@ -162,7 +162,7 @@ public class TestNamespaceAuditor {
}
}
- @Test
+ @Test (timeout=180000)
public void testValidQuotas() throws Exception {
boolean exceptionCaught = false;
FileSystem fs = UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getFileSystem();
@@ -221,7 +221,7 @@ public class TestNamespaceAuditor {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteTable() throws Exception {
String namespace = prefix + "_dummy";
NamespaceDescriptor nspDesc =
@@ -272,7 +272,7 @@ public class TestNamespaceAuditor {
}
}
- @Test
+ @Test (timeout=180000)
public void testRegionMerge() throws Exception {
String nsp1 = prefix + "_regiontest";
NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp1)
@@ -334,7 +334,7 @@ public class TestNamespaceAuditor {
htable.close();
}
- @Test
+ @Test (timeout=180000)
public void testRegionOperations() throws Exception {
String nsp1 = prefix + "_regiontest";
NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp1)
@@ -427,7 +427,7 @@ public class TestNamespaceAuditor {
}
}
- @Test
+ @Test (timeout=180000)
public void testStatePreserve() throws Exception {
final String nsp1 = prefix + "_testStatePreserve";
NamespaceDescriptor nspDesc = NamespaceDescriptor.create(nsp1)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
index 710e631..a01bb5f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
@@ -96,7 +96,7 @@ public class TestProcedureCoordinator {
* Currently we can only handle one procedure at a time. This makes sure we handle that and
* reject submitting more.
*/
- @Test
+ @Test (timeout=60000)
public void testThreadPoolSize() throws Exception {
ProcedureCoordinator coordinator = buildNewCoordinator();
Procedure proc = new Procedure(coordinator, monitor,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
index a2c86a1..968aa69 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
@@ -59,7 +59,7 @@ public class TestProcedureManager {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=60000)
public void testSimpleProcedureManager() throws IOException {
Admin admin = util.getHBaseAdmin();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
index 2d7a68f..30be7a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
@@ -399,7 +399,7 @@ public class TestProcedureMember {
* correctly build a new task for the requested operation
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=60000)
public void testNoTaskToBeRunFromRequest() throws Exception {
ThreadPoolExecutor pool = mock(ThreadPoolExecutor.class);
when(mockBuilder.buildSubprocedure(op, data)).thenReturn(null)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
index 211e9e6..1b8f3c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
@@ -100,17 +100,17 @@ public class TestZKProcedure {
});
}
- @Test
+ @Test (timeout=180000)
public void testEmptyMemberSet() throws Exception {
runCommit();
}
- @Test
+ @Test (timeout=180000)
public void testSingleMember() throws Exception {
runCommit("one");
}
- @Test
+ @Test (timeout=180000)
public void testMultipleMembers() throws Exception {
runCommit("one", "two", "three", "four" );
}
@@ -198,7 +198,7 @@ public class TestZKProcedure {
* Test a distributed commit with multiple cohort members, where one of the cohort members has a
* timeout exception during the prepare stage.
*/
- @Test
+ @Test (timeout=180000)
public void testMultiCohortWithMemberTimeoutDuringPrepare() throws Exception {
String opDescription = "error injection coordination";
String[] cohortMembers = new String[] { "one", "two", "three" };
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
index 52d4552..d73fbbe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
@@ -238,7 +238,7 @@ public class TestZKProcedureControllers {
}
// TODO Broken by composition.
-// @Test
+// @Test (timeout=180000)
// public void testCoordinatorControllerHandlesEarlyPrepareNodes() throws Exception {
// runEarlyPrepareNodes(startCoordinatorFirst, "testEarlyPreparenodes", new byte[] { 1, 2, 3 },
// "cohort1", "cohort2");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
index b2d8b38..f690532 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
@@ -50,7 +50,7 @@ import com.google.protobuf.ByteString;
*/
@Category({MiscTests.class, SmallTests.class})
public class TestProtobufUtil {
- @Test
+ @Test (timeout=60000)
public void testException() throws IOException {
NameBytesPair.Builder builder = NameBytesPair.newBuilder();
final String omg = "OMG!!!";
@@ -70,7 +70,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testGet() throws IOException {
ClientProtos.Get.Builder getBuilder = ClientProtos.Get.newBuilder();
getBuilder.setRow(ByteString.copyFromUtf8("row"));
@@ -103,7 +103,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testAppend() throws IOException {
long timeStamp = 111111;
MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
@@ -144,7 +144,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testDelete() throws IOException {
MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
@@ -191,7 +191,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testIncrement() throws IOException {
MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
@@ -225,7 +225,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testPut() throws IOException {
MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
@@ -275,7 +275,7 @@ public class TestProtobufUtil {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testScan() throws IOException {
ClientProtos.Scan.Builder scanBuilder = ClientProtos.Scan.newBuilder();
scanBuilder.setStartRow(ByteString.copyFromUtf8("row1"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
index 057a35d..c17e977 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
@@ -39,7 +39,7 @@ public class TestReplicationProtobuf {
* Little test to check we can basically convert list of a list of KVs into a CellScanner
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testGetCellScanner() throws IOException {
List a = new ArrayList();
KeyValue akv = new KeyValue(Bytes.toBytes("a"), -1L);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
index 18dd5ae..fe2a74a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java
@@ -75,7 +75,7 @@ public class TestQuotaAdmin {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testSimpleScan() throws Exception {
Admin admin = TEST_UTIL.getHBaseAdmin();
String userName = User.getCurrent().getShortName();
@@ -119,7 +119,7 @@ public class TestQuotaAdmin {
assertNumResults(0, null);
}
- @Test
+ @Test (timeout=180000)
public void testQuotaRetrieverFilter() throws Exception {
Admin admin = TEST_UTIL.getHBaseAdmin();
TableName[] tables = new TableName[] {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
index 34239c0..570fdc8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaTableUtil.java
@@ -80,7 +80,7 @@ public class TestQuotaTableUtil {
this.connection.close();
}
- @Test
+ @Test (timeout=180000)
public void testTableQuotaUtil() throws Exception {
final TableName table = TableName.valueOf("testTableQuotaUtilTable");
@@ -103,7 +103,7 @@ public class TestQuotaTableUtil {
assertEquals(null, resQuota);
}
- @Test
+ @Test (timeout=180000)
public void testNamespaceQuotaUtil() throws Exception {
final String namespace = "testNamespaceQuotaUtilNS";
@@ -126,7 +126,7 @@ public class TestQuotaTableUtil {
assertEquals(null, resQuota);
}
- @Test
+ @Test (timeout=180000)
public void testUserQuotaUtil() throws Exception {
final TableName table = TableName.valueOf("testUserQuotaUtilTable");
final String namespace = "testNS";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRateLimiter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRateLimiter.java
index 50897a2..ee280d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRateLimiter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRateLimiter.java
@@ -36,22 +36,22 @@ import static org.junit.Assert.assertTrue;
*/
@Category({RegionServerTests.class, SmallTests.class})
public class TestRateLimiter {
- @Test
+ @Test (timeout=60000)
public void testWaitIntervalTimeUnitSeconds() {
testWaitInterval(TimeUnit.SECONDS, 10, 100);
}
- @Test
+ @Test (timeout=60000)
public void testWaitIntervalTimeUnitMinutes() {
testWaitInterval(TimeUnit.MINUTES, 10, 6000);
}
- @Test
+ @Test (timeout=60000)
public void testWaitIntervalTimeUnitHours() {
testWaitInterval(TimeUnit.HOURS, 10, 360000);
}
- @Test
+ @Test (timeout=60000)
public void testWaitIntervalTimeUnitDays() {
testWaitInterval(TimeUnit.DAYS, 10, 8640000);
}
@@ -93,7 +93,7 @@ public class TestRateLimiter {
}
}
- @Test
+ @Test (timeout=60000)
public void testOverconsumption() {
RateLimiter limiter = new RateLimiter();
limiter.set(10, TimeUnit.SECONDS);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 192c989..b2758c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -115,7 +115,7 @@ public class TestAtomicOperation {
* More tests in
* @see org.apache.hadoop.hbase.client.TestFromClientSide#testAppend()
*/
- @Test
+ @Test (timeout=180000)
public void testAppend() throws IOException {
initHRegion(tableName, name.getMethodName(), fam1);
String v1 = "Ultimate Answer to the Ultimate Question of Life,"+
@@ -137,7 +137,7 @@ public class TestAtomicOperation {
/**
* Test multi-threaded increments.
*/
- @Test
+ @Test (timeout=180000)
public void testIncrementMultiThreads() throws IOException {
LOG.info("Starting test testIncrementMultiThreads");
@@ -252,7 +252,7 @@ public class TestAtomicOperation {
}
}
- @Test
+ @Test (timeout=180000)
public void testAppendMultiThreads() throws IOException {
LOG.info("Starting test testAppendMultiThreads");
// run a with mixed column families (1 and 3 versions)
@@ -314,7 +314,7 @@ public class TestAtomicOperation {
/**
* Test multi-threaded row mutations.
*/
- @Test
+ @Test (timeout=180000)
public void testRowMutationMultiThreads() throws IOException {
LOG.info("Starting test testRowMutationMultiThreads");
@@ -406,7 +406,7 @@ public class TestAtomicOperation {
/**
* Test multi-threaded region mutations.
*/
- @Test
+ @Test (timeout=180000)
public void testMultiRowMutationMultiThreads() throws IOException {
LOG.info("Starting test testMultiRowMutationMultiThreads");
@@ -532,7 +532,7 @@ public class TestAtomicOperation {
*
* Moved into TestAtomicOperation from its original location, TestHBase7051
*/
- @Test
+ @Test (timeout=180000)
public void testPutAndCheckAndPutInParallel() throws Exception {
final String tableName = "testPutAndCheckAndPut";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index 1ae17ed..e830f82 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -218,7 +218,7 @@ public class TestBlocksRead extends HBaseTestCase {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testBlocksRead() throws Exception {
byte[] TABLE = Bytes.toBytes("testBlocksRead");
String FAMILY = "cf1";
@@ -275,7 +275,7 @@ public class TestBlocksRead extends HBaseTestCase {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testLazySeekBlocksRead() throws Exception {
byte[] TABLE = Bytes.toBytes("testLazySeekBlocksRead");
String FAMILY = "cf1";
@@ -383,7 +383,7 @@ public class TestBlocksRead extends HBaseTestCase {
* Test # of blocks read to ensure disabling cache-fill on Scan works.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testBlocksStoredWhenCachingDisabled() throws Exception {
byte [] TABLE = Bytes.toBytes("testBlocksReadWhenCachingDisabled");
String FAMILY = "cf1";
@@ -428,7 +428,7 @@ public class TestBlocksRead extends HBaseTestCase {
}
}
- @Test
+ @Test (timeout=180000)
public void testLazySeekBlocksReadWithDelete() throws Exception {
byte[] TABLE = Bytes.toBytes("testLazySeekBlocksReadWithDelete");
String FAMILY = "cf1";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index 25330a8..66b787e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -57,7 +57,7 @@ public class TestBlocksScanned extends HBaseTestCase {
TEST_UTIL = new HBaseTestingUtility();
}
- @Test
+ @Test (timeout=60000)
public void testBlocksScanned() throws Exception {
byte [] tableName = Bytes.toBytes("TestBlocksScanned");
HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));
@@ -72,7 +72,7 @@ public class TestBlocksScanned extends HBaseTestCase {
_testBlocksScanned(table);
}
- @Test
+ @Test (timeout=60000)
public void testBlocksScannedWithEncoding() throws Exception {
byte [] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index 15dbef5..6ff6242 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -108,7 +108,7 @@ public class TestBulkLoad {
random.nextBytes(randomBytes);
}
- @Test
+ @Test (timeout=60000)
public void verifyBulkLoadEvent() throws IOException {
TableName tableName = TableName.valueOf("test", "test");
List> familyPaths = withFamilyPathsFor(family1);
@@ -133,25 +133,25 @@ public class TestBulkLoad {
.bulkLoadHFiles(familyPaths, false);
}
- @Test
+ @Test (timeout=60000)
public void bulkHLogShouldThrowNoErrorAndWriteMarkerWithBlankInput() throws IOException {
testRegionWithFamilies(family1).bulkLoadHFiles(new ArrayList>(), false);
}
- @Test
+ @Test (timeout=60000)
public void shouldBulkLoadSingleFamilyHLog() throws IOException {
context.checking(callOnce);
testRegionWithFamilies(family1).bulkLoadHFiles(withFamilyPathsFor(family1), false);
}
- @Test
+ @Test (timeout=60000)
public void shouldBulkLoadManyFamilyHLog() throws IOException {
context.checking(callOnce);
testRegionWithFamilies(family1, family2).bulkLoadHFiles(withFamilyPathsFor(family1, family2),
false);
}
- @Test
+ @Test (timeout=60000)
public void shouldBulkLoadManyFamilyHLogEvenWhenTableNameNamespaceSpecified() throws IOException {
context.checking(callOnce);
TableName tableName = TableName.valueOf("test", "test");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
index dc142d6..cc93367 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
@@ -205,7 +205,7 @@ public class TestCacheOnWriteInSchema {
}
}
- @Test
+ @Test (timeout=180000)
public void testCacheOnWriteInSchema() throws IOException {
// Write some random data into the store
StoreFile.Writer writer = store.createWriterInTmp(Integer.MAX_VALUE,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
index baea563..5bde93e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
@@ -67,7 +67,7 @@ public class TestClusterId {
}
}
- @Test
+ @Test (timeout=180000)
public void testClusterId() throws Exception {
TEST_UTIL.startMiniZKCluster();
TEST_UTIL.startMiniDFSCluster(1);
@@ -91,7 +91,7 @@ public class TestClusterId {
assertEquals(clusterId, rst.getRegionServer().getClusterId());
}
- @Test
+ @Test (timeout=180000)
public void testRewritingClusterIdToPB() throws Exception {
TEST_UTIL.startMiniZKCluster();
TEST_UTIL.startMiniDFSCluster(1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index 7632a41..5f52e19 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -52,7 +52,7 @@ public class TestColumnSeeking {
static final Log LOG = LogFactory.getLog(TestColumnSeeking.class);
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testDuplicateVersions() throws IOException {
String family = "Family";
byte[] familyBytes = Bytes.toBytes("Family");
@@ -165,7 +165,7 @@ public class TestColumnSeeking {
}
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testReseeking() throws IOException {
String family = "Family";
byte[] familyBytes = Bytes.toBytes("Family");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index 64668ad..002b828 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -134,7 +134,7 @@ public class TestCompaction {
* (used during RS shutdown)
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testInterruptCompaction() throws Exception {
assertEquals(0, count());
@@ -234,7 +234,7 @@ public class TestCompaction {
loader.flushcache();
}
- @Test
+ @Test (timeout=180000)
public void testCompactionWithCorruptResult() throws Exception {
int nfiles = 10;
for (int i = 0; i < nfiles; i++) {
@@ -274,7 +274,7 @@ public class TestCompaction {
* Create a custom compaction request and be sure that we can track it through the queue, knowing
* when the compaction is completed.
*/
- @Test
+ @Test (timeout=180000)
public void testTrackingCompactionRequest() throws Exception {
// setup a compact/split thread on a mock server
HRegionServer mockServer = Mockito.mock(HRegionServer.class);
@@ -303,7 +303,7 @@ public class TestCompaction {
* {@link CompactSplitThread}
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=180000)
public void testMultipleCustomCompactionRequests() throws Exception {
// setup a compact/split thread on a mock server
HRegionServer mockServer = Mockito.mock(HRegionServer.class);
@@ -484,7 +484,7 @@ public class TestCompaction {
}
/** Test compaction priority management and multiple compactions per store (HBASE-8665). */
- @Test
+ @Test (timeout=180000)
public void testCompactionQueuePriorities() throws Exception {
// Setup a compact/split thread on a mock server.
final Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index 3f5f905..ec78418 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -79,7 +79,7 @@ public class TestCompactionState {
compaction("testMinorCompactionOnFamily", 15, CompactionState.MINOR, true);
}
- @Test
+ @Test (timeout=300000)
public void testInvalidColumnFamily() throws IOException, InterruptedException {
TableName table = TableName.valueOf("testInvalidColumnFamily");
byte [] family = Bytes.toBytes("family");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index d7b4a04..a35d53c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -145,7 +145,7 @@ public class TestCompoundBloomFilter {
return kvList;
}
- @Test
+ @Test (timeout=180000)
public void testCompoundBloomFilter() throws IOException {
conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
for (int t = 0; t < NUM_TESTS; ++t) {
@@ -329,7 +329,7 @@ public class TestCompoundBloomFilter {
return w.getPath();
}
- @Test
+ @Test (timeout=180000)
public void testCompoundBloomSizing() {
int bloomBlockByteSize = 4096;
int bloomBlockBitSize = bloomBlockByteSize * 8;
@@ -345,7 +345,7 @@ public class TestCompoundBloomFilter {
assertTrue(Math.abs(bloomSizeRatio - 0.9999) < 0.0001);
}
- @Test
+ @Test (timeout=180000)
public void testCreateKey() {
CompoundBloomFilterBase cbfb = new CompoundBloomFilterBase();
byte[] row = "myRow".getBytes();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java
index c185075..f76d2f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java
@@ -51,7 +51,7 @@ public class TestDefaultStoreEngine {
}
}
- @Test
+ @Test (timeout=60000)
public void testCustomParts() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.set(DefaultStoreEngine.DEFAULT_COMPACTOR_CLASS_KEY, DummyCompactor.class.getName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index b791fdb..226c66e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -91,7 +91,7 @@ public class TestEncryptionKeyRotation {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testCFKeyRotation() throws Exception {
// Create the table schema
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("default",
@@ -152,7 +152,7 @@ public class TestEncryptionKeyRotation {
}
}
- @Test
+ @Test (timeout=180000)
public void testMasterKeyRotation() throws Exception {
// Create the table schema
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("default",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index ebfc89c..a02ffdd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -120,7 +120,7 @@ public class TestEncryptionRandomKeying {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testRandomKeying() throws Exception {
// Verify we have store file(s) with a random key
final List initialPaths = findStorefilePaths(htd.getTableName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index f29601c..a2b0d76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -95,7 +95,7 @@ public class TestEndToEndSplitTransaction {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testMasterOpsWhileSplitting() throws Exception {
TableName tableName =
TableName.valueOf("TestSplit");
@@ -185,7 +185,7 @@ public class TestEndToEndSplitTransaction {
/**
* Tests that the client sees meta table changes as atomic during splits
*/
- @Test
+ @Test (timeout=300000)
public void testFromClientSideWhileSplitting() throws Throwable {
LOG.info("Starting testFromClientSideWhileSplitting");
final TableName TABLENAME =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
index 72d7aa9..e8872ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
@@ -69,7 +69,7 @@ public class TestExplicitColumnTracker {
}
}
- @Test
+ @Test (timeout=60000)
public void testGet_SingleVersion() throws IOException{
//Create tracker
TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR);
@@ -95,7 +95,7 @@ public class TestExplicitColumnTracker {
runTest(maxVersions, columns, scanner, expected, 0);
}
- @Test
+ @Test (timeout=60000)
public void testGet_MultiVersion() throws IOException{
//Create tracker
TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR);
@@ -147,7 +147,7 @@ public class TestExplicitColumnTracker {
runTest(maxVersions, columns, scanner, expected, 0);
}
- @Test
+ @Test (timeout=60000)
public void testGet_MultiVersionWithLookAhead() throws IOException{
//Create tracker
TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR);
@@ -202,7 +202,7 @@ public class TestExplicitColumnTracker {
/**
* hbase-2259
*/
- @Test
+ @Test (timeout=60000)
public void testStackOverflow() throws IOException{
int maxVersions = 1;
TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR);
@@ -229,7 +229,7 @@ public class TestExplicitColumnTracker {
/**
* Regression test for HBASE-2545
*/
- @Test
+ @Test (timeout=60000)
public void testInfiniteLoop() throws IOException {
TreeSet columns = new TreeSet(Bytes.BYTES_COMPARATOR);
columns.addAll(Arrays.asList(new byte[][] {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
index 0d3fa13..a2aed68 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
@@ -73,7 +73,7 @@ public class TestFSErrorsExposed {
* Injects errors into the pread calls of an on-disk file, and makes
* sure those bubble up to the HFile scanner
*/
- @Test
+ @Test (timeout=180000)
public void testHFileScannerThrowsErrors() throws IOException {
Path hfilePath = new Path(new Path(
util.getDataTestDir("internalScannerExposesErrors"),
@@ -123,7 +123,7 @@ public class TestFSErrorsExposed {
* Injects errors into the pread calls of an on-disk file, and makes
* sure those bubble up to the StoreFileScanner
*/
- @Test
+ @Test (timeout=180000)
public void testStoreFileScannerThrowsErrors() throws IOException {
Path hfilePath = new Path(new Path(
util.getDataTestDir("internalScannerExposesErrors"),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushRegionEntry.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushRegionEntry.java
index bd50f59..aace42b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushRegionEntry.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushRegionEntry.java
@@ -32,7 +32,7 @@ public class TestFlushRegionEntry {
EnvironmentEdgeManager.injectEdge(edge);
}
- @Test
+ @Test (timeout=60000)
public void test() {
HRegion r = Mockito.mock(HRegion.class);
FlushRegionEntry entry = new FlushRegionEntry(r, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index 0d7820f..0b9965a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -67,7 +67,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
- @Test
+ @Test (timeout=180000)
public void testUsingMetaAndBinary() throws IOException {
FileSystem filesystem = FileSystem.get(conf);
Path rootdir = testDir;
@@ -184,7 +184,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
* Test file of multiple deletes and with deletes as final key.
* @see HBASE-751
*/
- @Test
+ @Test (timeout=180000)
public void testGetClosestRowBefore3() throws IOException{
HRegion region = null;
byte [] c0 = COLUMNS[0];
@@ -293,7 +293,7 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
}
/** For HBASE-694 */
- @Test
+ @Test (timeout=180000)
public void testGetClosestRowBefore2() throws IOException{
HRegion region = null;
byte [] c0 = COLUMNS[0];
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 2930f72..135950e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -463,7 +463,7 @@ public class TestHRegion {
FileSystem.closeAllForUGI(user.getUGI());
}
- @Test
+ @Test (timeout=180000)
public void testCompactionAffectedByScanners() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, method, CONF, family);
@@ -511,7 +511,7 @@ public class TestHRegion {
assertEquals(0, results.size());
}
- @Test
+ @Test (timeout=180000)
public void testToShowNPEOnRegionScannerReseek() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, method, CONF, family);
@@ -541,7 +541,7 @@ public class TestHRegion {
scanner1.close();
}
- @Test
+ @Test (timeout=180000)
public void testSkipRecoveredEditsReplay() throws Exception {
String method = "testSkipRecoveredEditsReplay";
TableName tableName = TableName.valueOf(method);
@@ -594,7 +594,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testSkipRecoveredEditsReplaySomeIgnored() throws Exception {
String method = "testSkipRecoveredEditsReplaySomeIgnored";
TableName tableName = TableName.valueOf(method);
@@ -652,7 +652,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testSkipRecoveredEditsReplayAllIgnored() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, method, CONF, family);
@@ -684,7 +684,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testSkipRecoveredEditsReplayTheLastFileIgnored() throws Exception {
String method = "testSkipRecoveredEditsReplayTheLastFileIgnored";
TableName tableName = TableName.valueOf(method);
@@ -750,7 +750,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testRecoveredEditsReplayCompaction() throws Exception {
String method = name.getMethodName();
TableName tableName = TableName.valueOf(method);
@@ -845,7 +845,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testFlushMarkers() throws Exception {
// tests that flush markers are written to WAL and handled at recovered edits
String method = name.getMethodName();
@@ -999,7 +999,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
@SuppressWarnings("unchecked")
public void testFlushMarkersWALFail() throws Exception {
// test the cases where the WAL append for flush markers fail.
@@ -1083,7 +1083,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetWhileRegionClose() throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName());
Configuration hc = initSplit();
@@ -1174,7 +1174,7 @@ public class TestHRegion {
/*
* An involved filter test. Has multiple column families and deletes in mix.
*/
- @Test
+ @Test (timeout=180000)
public void testWeirdCacheBehaviour() throws Exception {
byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"), Bytes.toBytes("trans-type"),
@@ -1217,7 +1217,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testAppendWithReadOnlyTable() throws Exception {
byte[] TABLE = Bytes.toBytes("readOnlyTable");
this.region = initHRegion(TABLE, getName(), CONF, true, Bytes.toBytes("somefamily"));
@@ -1237,7 +1237,7 @@ public class TestHRegion {
assertTrue(exceptionCaught == true);
}
- @Test
+ @Test (timeout=180000)
public void testIncrWithReadOnlyTable() throws Exception {
byte[] TABLE = Bytes.toBytes("readOnlyTable");
this.region = initHRegion(TABLE, getName(), CONF, true, Bytes.toBytes("somefamily"));
@@ -1329,7 +1329,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testFamilyWithAndWithoutColon() throws Exception {
byte[] b = Bytes.toBytes(getName());
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
@@ -1351,7 +1351,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testBatchPut_whileNoRowLocksHeld() throws IOException {
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
byte[] qual = Bytes.toBytes("qual");
@@ -1392,7 +1392,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testBatchPut_whileMultipleRowLocksHeld() throws Exception {
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
byte[] qual = Bytes.toBytes("qual");
@@ -1493,7 +1493,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testBatchPutWithTsSlop() throws Exception {
byte[] b = Bytes.toBytes(getName());
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
@@ -1532,7 +1532,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// checkAndMutate tests
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testCheckAndMutate_WithEmptyRowValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1607,7 +1607,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndMutate_WithWrongValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1641,7 +1641,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndMutate_WithCorrectValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1674,7 +1674,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndMutate_WithNonEqualCompareOp() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1769,7 +1769,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndPut_ThatPutWasWritten() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1816,7 +1816,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndPut_wrongRowInPut() throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName());
this.region = initHRegion(tableName, this.getName(), CONF, COLUMNS);
@@ -1836,7 +1836,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndDelete_ThatDeleteWasWritten() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1916,7 +1916,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// Delete tests
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testDelete_multiDeleteColumn() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1948,7 +1948,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testDelete_CheckFamily() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -1992,7 +1992,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testDelete_mixed() throws IOException, InterruptedException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
@@ -2060,7 +2060,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteRowWithFutureTs() throws IOException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
@@ -2103,7 +2103,7 @@ public class TestHRegion {
* Tests that the special LATEST_TIMESTAMP option for puts gets replaced by
* the actual timestamp
*/
- @Test
+ @Test (timeout=180000)
public void testPutWithLatestTS() throws IOException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
@@ -2155,7 +2155,7 @@ public class TestHRegion {
* bound. Note that the timestamp lower bound is automatically handled for us
* by the TTL field.
*/
- @Test
+ @Test (timeout=180000)
public void testPutWithTsSlop() throws IOException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
@@ -2184,7 +2184,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_DeleteOneFamilyNotAnother() throws IOException {
byte[] fam1 = Bytes.toBytes("columnA");
byte[] fam2 = Bytes.toBytes("columnB");
@@ -2226,14 +2226,14 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumns_PostInsert() throws IOException, InterruptedException {
Delete delete = new Delete(row);
delete.deleteColumns(fam1, qual1);
doTestDelete_AndPostInsert(delete);
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamily_PostInsert() throws IOException, InterruptedException {
Delete delete = new Delete(row);
delete.deleteFamily(fam1);
@@ -2285,7 +2285,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testDelete_CheckTimestampUpdated() throws IOException {
TableName tableName = TableName.valueOf(name.getMethodName());
byte[] row1 = Bytes.toBytes("row1");
@@ -2328,7 +2328,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// Get tests
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testGet_FamilyChecker() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -2356,7 +2356,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testGet_Basic() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -2408,7 +2408,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testGet_Empty() throws IOException {
byte[] row = Bytes.toBytes("row");
byte[] fam = Bytes.toBytes("fam");
@@ -2430,7 +2430,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// Merge test
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testMerge() throws IOException {
byte[][] families = { fam1, fam2, fam3 };
Configuration hc = initSplit();
@@ -2518,7 +2518,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// Scanner tests
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testGetScanner_WithOkFamilies() throws IOException {
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
@@ -2543,7 +2543,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetScanner_WithNotOkFamilies() throws IOException {
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
@@ -2569,7 +2569,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetScanner_WithNoFamilies() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -2618,7 +2618,7 @@ public class TestHRegion {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testGetScanner_WithRegionClosed() throws IOException {
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
@@ -2650,7 +2650,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testRegionScanner_Next() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] row2 = Bytes.toBytes("row2");
@@ -2716,7 +2716,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_ExplicitColumns_FromMemStore_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
@@ -2775,7 +2775,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_ExplicitColumns_FromFilesOnly_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
@@ -2838,7 +2838,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -2920,7 +2920,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_Wildcard_FromMemStore_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
@@ -2981,7 +2981,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_Wildcard_FromFilesOnly_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
@@ -3042,7 +3042,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_StopRow1542() throws IOException {
byte[] family = Bytes.toBytes("testFamily");
this.region = initHRegion(tableName, getName(), CONF, family);
@@ -3090,7 +3090,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanner_Wildcard_FromMemStoreAndFiles_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
@@ -3175,7 +3175,7 @@ public class TestHRegion {
* Here we test scan optimization when only subset of CFs are used in filter
* conditions.
*/
- @Test
+ @Test (timeout=180000)
public void testScanner_JoinedScanners() throws IOException {
byte[] cf_essential = Bytes.toBytes("essential");
byte[] cf_joined = Bytes.toBytes("joined");
@@ -3243,7 +3243,7 @@ public class TestHRegion {
*
* Test case when scan limits amount of KVs returned on each next() call.
*/
- @Test
+ @Test (timeout=180000)
public void testScanner_JoinedScannersWithLimits() throws IOException {
final byte[] cf_first = Bytes.toBytes("first");
final byte[] cf_second = Bytes.toBytes("second");
@@ -3341,7 +3341,7 @@ public class TestHRegion {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testBasicSplit() throws Exception {
byte[][] families = { fam1, fam2, fam3 };
@@ -3427,7 +3427,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testSplitRegion() throws IOException {
byte[] qualifier = Bytes.toBytes("qualifier");
Configuration hc = initSplit();
@@ -3466,7 +3466,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testClearForceSplit() throws IOException {
byte[] qualifier = Bytes.toBytes("qualifier");
Configuration hc = initSplit();
@@ -3544,7 +3544,7 @@ public class TestHRegion {
* @throws InterruptedException
* thread join
*/
- @Test
+ @Test (timeout=180000)
public void testFlushCacheWhileScanning() throws IOException, InterruptedException {
byte[] family = Bytes.toBytes("family");
int numRows = 1000;
@@ -3666,7 +3666,7 @@ public class TestHRegion {
* @throws InterruptedException
* when joining threads
*/
- @Test
+ @Test (timeout=180000)
public void testWritesWhileScanning() throws IOException, InterruptedException {
int testCount = 100;
int numRows = 1;
@@ -3831,7 +3831,7 @@ public class TestHRegion {
* @throws InterruptedException
* when joining threads
*/
- @Test
+ @Test (timeout=180000)
public void testWritesWhileGetting() throws Exception {
int testCount = 100;
int numRows = 1;
@@ -3941,7 +3941,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testHolesInMeta() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, Bytes.toBytes("x"), Bytes.toBytes("z"), method, CONF,
@@ -3964,7 +3964,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testIndexesScanWithOneDeletedRow() throws IOException {
byte[] family = Bytes.toBytes("family");
@@ -4006,7 +4006,7 @@ public class TestHRegion {
// ////////////////////////////////////////////////////////////////////////////
// Bloom filter test
// ////////////////////////////////////////////////////////////////////////////
- @Test
+ @Test (timeout=180000)
public void testBloomFilterSize() throws IOException {
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("col");
@@ -4064,7 +4064,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testAllColumnsWithBloomFilter() throws IOException {
byte[] TABLE = Bytes.toBytes("testAllColumnsWithBloomFilter");
byte[] FAMILY = Bytes.toBytes("family");
@@ -4112,7 +4112,7 @@ public class TestHRegion {
* issuing delete row on columns with bloom filter set to row+col
* (BloomType.ROWCOL)
*/
- @Test
+ @Test (timeout=180000)
public void testDeleteRowWithBloomFilter() throws IOException {
byte[] familyName = Bytes.toBytes("familyName");
@@ -4150,7 +4150,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testgetHDFSBlocksDistribution() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
// Why do we set the block size in this test? If we set it smaller than the kvs, then we'll
@@ -4228,7 +4228,7 @@ public class TestHRegion {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
TableName tableName = TableName.valueOf(name.getMethodName());
HRegionInfo info = null;
@@ -4263,7 +4263,7 @@ public class TestHRegion {
* Verifies that the .regioninfo file is written on region creation and that
* is recreated if missing during region opening.
*/
- @Test
+ @Test (timeout=180000)
public void testRegionInfoFileCreation() throws IOException {
Path rootDir = new Path(dir + "testRegionInfoFileCreation");
@@ -4345,7 +4345,7 @@ public class TestHRegion {
* Test case to check increment function with memstore flushing
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testParallelIncrementWithMemStoreFlush() throws Exception {
byte[] family = Incrementer.family;
this.region = initHRegion(tableName, method, CONF, family);
@@ -4432,7 +4432,7 @@ public class TestHRegion {
* Test case to check append function with memstore flushing
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testParallelAppendWithMemStoreFlush() throws Exception {
byte[] family = Appender.family;
this.region = initHRegion(tableName, method, CONF, family);
@@ -4492,7 +4492,7 @@ public class TestHRegion {
* Test case to check put function with memstore flushing for same row, same ts
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testPutWithMemStoreFlush() throws Exception {
byte[] family = Bytes.toBytes("family");
;
@@ -4548,7 +4548,7 @@ public class TestHRegion {
assertArrayEquals(Bytes.toBytes("value1"), CellUtil.cloneValue(kvs.get(0)));
}
- @Test
+ @Test (timeout=180000)
public void testDurability() throws Exception {
String method = "testDurability";
// there are 5 x 5 cases:
@@ -4651,7 +4651,7 @@ public class TestHRegion {
this.region = null;
}
- @Test
+ @Test (timeout=180000)
public void testRegionReplicaSecondary() throws IOException {
// create a primary region, load some data and flush
// create a secondary region, and do a get against that
@@ -4701,7 +4701,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testRegionReplicaSecondaryIsReadOnly() throws IOException {
// create a primary region, load some data and flush
// create a secondary region, and do a put against that
@@ -4764,7 +4764,7 @@ public class TestHRegion {
"hregion-" + RandomStringUtils.randomNumeric(8));
}
- @Test
+ @Test (timeout=180000)
public void testCompactionFromPrimary() throws IOException {
Path rootDir = new Path(dir + "testRegionReplicaSecondary");
FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
@@ -4912,7 +4912,7 @@ public class TestHRegion {
* Test that we get the expected flush results back
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testFlushResult() throws IOException {
String method = name.getMethodName();
byte[] tableName = Bytes.toBytes(method);
@@ -5706,7 +5706,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testWriteRequestsCounter() throws IOException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
@@ -5732,7 +5732,7 @@ public class TestHRegion {
this.region = null;
}
- @Test
+ @Test (timeout=180000)
@SuppressWarnings("unchecked")
public void testOpenRegionWrittenToWAL() throws Exception {
final ServerName serverName = ServerName.valueOf("testOpenRegionWrittenToWAL", 100, 42);
@@ -5803,7 +5803,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
@SuppressWarnings("unchecked")
public void testCloseRegionWrittenToWAL() throws Exception {
final ServerName serverName = ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42);
@@ -5915,7 +5915,7 @@ public class TestHRegion {
}
}
- @Test
+ @Test (timeout=180000)
public void testCellTTLs() throws IOException {
IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
index 5f792fa..63f9740 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
@@ -52,7 +52,7 @@ public class TestHRegionFileSystem {
private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final Log LOG = LogFactory.getLog(TestHRegionFileSystem.class);
- @Test
+ @Test (timeout=60000)
public void testOnDiskRegionCreation() throws IOException {
Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testOnDiskRegionCreation");
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -84,7 +84,7 @@ public class TestHRegionFileSystem {
fs.delete(rootDir, true);
}
- @Test
+ @Test (timeout=60000)
public void testNonIdempotentOpsWithRetries() throws IOException {
Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testOnDiskRegionCreation");
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -208,7 +208,7 @@ public class TestHRegionFileSystem {
}
}
- @Test
+ @Test (timeout=60000)
public void testTempAndCommit() throws IOException {
Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testTempAndCommit");
FileSystem fs = TEST_UTIL.getTestFileSystem();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index 5fde726..50fb39b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -50,7 +50,7 @@ import com.google.protobuf.ByteString;
@Category({RegionServerTests.class, SmallTests.class})
public class TestHRegionInfo {
- @Test
+ @Test (timeout=60000)
public void testPb() throws DeserializationException {
HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
byte [] bytes = hri.toByteArray();
@@ -58,7 +58,7 @@ public class TestHRegionInfo {
assertTrue(hri.equals(pbhri));
}
- @Test
+ @Test (timeout=60000)
public void testReadAndWriteHRegionInfoFile() throws IOException, InterruptedException {
HBaseTestingUtility htu = new HBaseTestingUtility();
HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
@@ -90,7 +90,7 @@ public class TestHRegionInfo {
return statuses[0].getModificationTime();
}
- @Test
+ @Test (timeout=60000)
public void testCreateHRegionInfoName() throws Exception {
String tableName = "tablename";
final TableName tn = TableName.valueOf(tableName);
@@ -114,7 +114,7 @@ public class TestHRegionInfo {
nameStr);
}
- @Test
+ @Test (timeout=60000)
public void testContainsRange() {
HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("testtable"));
HRegionInfo hri = new HRegionInfo(
@@ -142,7 +142,7 @@ public class TestHRegionInfo {
}
}
- @Test
+ @Test (timeout=60000)
public void testLastRegionCompare() {
HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("testtable"));
HRegionInfo hrip = new HRegionInfo(
@@ -152,12 +152,12 @@ public class TestHRegionInfo {
assertTrue(hrip.compareTo(hric) > 0);
}
- @Test
+ @Test (timeout=60000)
public void testMetaTables() {
assertTrue(HRegionInfo.FIRST_META_REGIONINFO.isMetaTable());
}
- @Test
+ @Test (timeout=60000)
public void testComparator() {
TableName tablename = TableName.valueOf("comparatorTablename");
byte[] empty = new byte[0];
@@ -169,7 +169,7 @@ public class TestHRegionInfo {
assertTrue(newer.compareTo(newer) == 0);
}
- @Test
+ @Test (timeout=60000)
public void testRegionNameForRegionReplicas() throws Exception {
String tableName = "tablename";
final TableName tn = TableName.valueOf(tableName);
@@ -197,7 +197,7 @@ public class TestHRegionInfo {
String.format(HRegionInfo.REPLICA_ID_FORMAT, 0xFFFF), nameStr);
}
- @Test
+ @Test (timeout=60000)
public void testParseName() throws IOException {
TableName tableName = TableName.valueOf("testParseName");
byte[] startKey = Bytes.toBytes("startKey");
@@ -225,7 +225,7 @@ public class TestHRegionInfo {
String.format(HRegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
}
- @Test
+ @Test (timeout=60000)
public void testConvert() {
TableName tableName = TableName.valueOf("ns1:table1");
byte[] startKey = Bytes.toBytes("startKey");
@@ -261,7 +261,7 @@ public class TestHRegionInfo {
assertEquals(expectedHri, convertedHri);
}
- @Test
+ @Test (timeout=60000)
public void testRegionDetailsForDisplay() throws IOException {
byte[] startKey = new byte[] {0x01, 0x01, 0x02, 0x03};
byte[] endKey = new byte[] {0x01, 0x01, 0x02, 0x04};
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 09e9d5e..84918b6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -192,7 +192,7 @@ public class TestHRegionReplayEvents {
// 8. replay flush form an earlier seqId (test ignoring seqIds)
// 9. start flush does not prevent region from closing.
- @Test
+ @Test (timeout=180000)
public void testRegionReplicaSecondaryCannotFlush() throws IOException {
// load some data and flush ensure that the secondary replica will not execute the flush
@@ -278,7 +278,7 @@ public class TestHRegionReplayEvents {
TEST_UTIL.getConfiguration());
}
- @Test
+ @Test (timeout=180000)
public void testReplayFlushesAndCompactions() throws IOException {
// initiate a secondary region with some data.
@@ -377,7 +377,7 @@ public class TestHRegionReplayEvents {
* Tests cases where we prepare a flush with some seqId and we receive other flush start markers
* equal to, greater or less than the previous flush start marker.
*/
- @Test
+ @Test (timeout=180000)
public void testReplayFlushStartMarkers() throws IOException {
// load some data to primary and flush. 1 flush and some more unflushed data
putDataWithFlushes(primaryRegion, 100, 100, 100);
@@ -482,7 +482,7 @@ public class TestHRegionReplayEvents {
* Tests the case where we prepare a flush with some seqId and we receive a flush commit marker
* less than the previous flush start marker.
*/
- @Test
+ @Test (timeout=180000)
public void testReplayFlushCommitMarkerSmallerThanFlushStartMarker() throws IOException {
// load some data to primary and flush. 2 flushes and some more unflushed data
putDataWithFlushes(primaryRegion, 100, 200, 100);
@@ -573,7 +573,7 @@ public class TestHRegionReplayEvents {
* Tests the case where we prepare a flush with some seqId and we receive a flush commit marker
* larger than the previous flush start marker.
*/
- @Test
+ @Test (timeout=180000)
public void testReplayFlushCommitMarkerLargerThanFlushStartMarker() throws IOException {
// load some data to primary and flush. 1 flush and some more unflushed data
putDataWithFlushes(primaryRegion, 100, 100, 100);
@@ -665,7 +665,7 @@ public class TestHRegionReplayEvents {
* The memstore edits should be dropped after the flush commit replay since they should be in
* flushed files
*/
- @Test
+ @Test (timeout=180000)
public void testReplayFlushCommitMarkerWithoutFlushStartMarkerDroppableMemstore()
throws IOException {
testReplayFlushCommitMarkerWithoutFlushStartMarker(true);
@@ -676,7 +676,7 @@ public class TestHRegionReplayEvents {
* The memstore edits should be not dropped after the flush commit replay since not every edit
* will be in flushed files (based on seqId)
*/
- @Test
+ @Test (timeout=180000)
public void testReplayFlushCommitMarkerWithoutFlushStartMarkerNonDroppableMemstore()
throws IOException {
testReplayFlushCommitMarkerWithoutFlushStartMarker(false);
@@ -779,7 +779,7 @@ public class TestHRegionReplayEvents {
/**
* Tests replaying region open markers from primary region. Checks whether the files are picked up
*/
- @Test
+ @Test (timeout=180000)
public void testReplayRegionOpenEvent() throws IOException {
putDataWithFlushes(primaryRegion, 100, 0, 100); // no flush
int numRows = 100;
@@ -859,7 +859,7 @@ public class TestHRegionReplayEvents {
* Tests the case where we replay a region open event after a flush start but before receiving
* flush commit
*/
- @Test
+ @Test (timeout=180000)
public void testReplayRegionOpenEventAfterFlushStart() throws IOException {
putDataWithFlushes(primaryRegion, 100, 100, 100);
int numRows = 200;
@@ -938,7 +938,7 @@ public class TestHRegionReplayEvents {
* Tests whether edits coming in for replay are skipped which have smaller seq id than the seqId
* of the last replayed region open event.
*/
- @Test
+ @Test (timeout=180000)
public void testSkippingEditsWithSmallerSeqIdAfterRegionOpenEvent() throws IOException {
putDataWithFlushes(primaryRegion, 100, 100, 0);
int numRows = 100;
@@ -996,7 +996,7 @@ public class TestHRegionReplayEvents {
}
}
- @Test
+ @Test (timeout=180000)
public void testReplayFlushSeqIds() throws IOException {
// load some data to primary and flush
int start = 0;
@@ -1040,7 +1040,7 @@ public class TestHRegionReplayEvents {
verifyData(secondaryRegion, 0, 100, cq, families);
}
- @Test
+ @Test (timeout=180000)
public void testSeqIdsFromReplay() throws IOException {
// test the case where seqId's coming from replayed WALEdits are made persisted with their
// original seqIds and they are made visible through mvcc read point upon replay
@@ -1083,7 +1083,7 @@ public class TestHRegionReplayEvents {
* @throws IOException
*/
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=180000)
public void testSecondaryRegionDoesNotWriteRegionEventsToWAL() throws IOException {
secondaryRegion.close();
walSecondary = spy(walSecondary);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index af49556..cf70a62 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -291,7 +291,7 @@ public class TestHRegionServerBulkLoad {
/**
* Atomic bulk load.
*/
- @Test
+ @Test (timeout=300000)
public void testAtomicBulkLoad() throws Exception {
TableName TABLE_NAME = TableName.valueOf("atomicBulkLoad");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index b96a6a5..12b970f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -55,7 +55,7 @@ public class TestHeapMemoryManager {
private long maxHeapSize = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
- @Test
+ @Test (timeout=60000)
public void testAutoTunerShouldBeOffWhenMaxMinRangesForMemstoreIsNotGiven() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setFloat(HeapMemoryManager.BLOCK_CACHE_SIZE_MAX_RANGE_KEY, 0.75f);
@@ -65,7 +65,7 @@ public class TestHeapMemoryManager {
assertFalse(manager.isTunerOn());
}
- @Test
+ @Test (timeout=60000)
public void testAutoTunerShouldBeOffWhenMaxMinRangesForBlockCacheIsNotGiven() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setFloat(HeapMemoryManager.MEMSTORE_SIZE_MAX_RANGE_KEY, 0.75f);
@@ -75,7 +75,7 @@ public class TestHeapMemoryManager {
assertFalse(manager.isTunerOn());
}
- @Test
+ @Test (timeout=60000)
public void testWhenMemstoreAndBlockCacheMaxMinChecksFails() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub(0);
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub(0);
@@ -97,7 +97,7 @@ public class TestHeapMemoryManager {
}
}
- @Test
+ @Test (timeout=60000)
public void testWhenClusterIsWriteHeavy() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub((long) (maxHeapSize * 0.4));
@@ -138,7 +138,7 @@ public class TestHeapMemoryManager {
blockCache.maxSize);
}
- @Test
+ @Test (timeout=60000)
public void testWhenClusterIsReadHeavy() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub((long) (maxHeapSize * 0.4));
@@ -174,7 +174,7 @@ public class TestHeapMemoryManager {
blockCache.maxSize);
}
- @Test
+ @Test (timeout=60000)
public void testPluggingInHeapMemoryTuner() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub((long) (maxHeapSize * 0.4));
@@ -205,7 +205,7 @@ public class TestHeapMemoryManager {
assertHeapSpace(0.05f, memStoreFlusher.memstoreSize);// Memstore
}
- @Test
+ @Test (timeout=60000)
public void testWhenSizeGivenByHeapTunerGoesOutsideRange() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub((long) (maxHeapSize * 0.4));
@@ -230,7 +230,7 @@ public class TestHeapMemoryManager {
assertHeapSpace(0.1f, blockCache.maxSize);
}
- @Test
+ @Test (timeout=60000)
public void testWhenCombinedHeapSizesFromTunerGoesOutSideMaxLimit() throws Exception {
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
MemstoreFlusherStub memStoreFlusher = new MemstoreFlusherStub((long) (maxHeapSize * 0.4));
@@ -255,7 +255,7 @@ public class TestHeapMemoryManager {
assertEquals(oldBlockCacheSize, blockCache.maxSize);
}
- @Test
+ @Test (timeout=60000)
public void testWhenL2BlockCacheIsOnHeap() throws Exception {
HeapMemoryManager heapMemoryManager = null;
BlockCacheStub blockCache = new BlockCacheStub((long) (maxHeapSize * 0.4));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
index 81d681c..7b48b25 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
@@ -72,7 +72,7 @@ public class TestJoinedScanners {
private static int selectionRatio = 30;
private static int valueWidth = 128 * 1024;
- @Test
+ @Test (timeout=300000)
public void testJoinedScanners() throws Exception {
String dataNodeHosts[] = new String[] { "host1", "host2", "host3" };
int regionServersCount = 3;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index 49ee7e9..64abd9f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -93,7 +93,7 @@ public class TestKeepDeletes {
* Column Delete markers are versioned
* Time range scan of deleted rows are possible
*/
- @Test
+ @Test (timeout=60000)
public void testBasicScenario() throws Exception {
// keep 3 versions, rows do not expire
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
@@ -190,7 +190,7 @@ public class TestKeepDeletes {
* if the store does not have KEEP_DELETED_CELLS enabled.
* (can be changed easily)
*/
- @Test
+ @Test (timeout=60000)
public void testRawScanWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
@@ -235,7 +235,7 @@ public class TestKeepDeletes {
/**
* basic verification of existing behavior
*/
- @Test
+ @Test (timeout=60000)
public void testWithoutKeepingDeletes() throws Exception {
// KEEP_DELETED_CELLS is NOT enabled
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
@@ -280,7 +280,7 @@ public class TestKeepDeletes {
/**
* The ExplicitColumnTracker does not support "raw" scanning.
*/
- @Test
+ @Test (timeout=60000)
public void testRawScanWithColumns() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -304,7 +304,7 @@ public class TestKeepDeletes {
/**
* Verify that "raw" scanning mode return delete markers and deletes rows.
*/
- @Test
+ @Test (timeout=60000)
public void testRawScan() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -394,7 +394,7 @@ public class TestKeepDeletes {
/**
* Verify that delete markers are removed from an otherwise empty store.
*/
- @Test
+ @Test (timeout=60000)
public void testDeleteMarkerExpirationEmptyStore() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -437,7 +437,7 @@ public class TestKeepDeletes {
/**
* Test delete marker removal from store files.
*/
- @Test
+ @Test (timeout=60000)
public void testDeleteMarkerExpiration() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -500,7 +500,7 @@ public class TestKeepDeletes {
/**
* Test delete marker removal from store files.
*/
- @Test
+ @Test (timeout=60000)
public void testWithOldRow() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -578,7 +578,7 @@ public class TestKeepDeletes {
/**
* Verify correct range demarcation
*/
- @Test
+ @Test (timeout=60000)
public void testRanges() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 3,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -660,7 +660,7 @@ public class TestKeepDeletes {
* with their respective puts and removed correctly by
* versioning (i.e. not relying on the store earliestPutTS).
*/
- @Test
+ @Test (timeout=60000)
public void testDeleteMarkerVersioning() throws Exception {
HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
HConstants.FOREVER, KeepDeletedCells.TRUE);
@@ -804,7 +804,7 @@ public class TestKeepDeletes {
* Test keeping deleted rows together with min versions set
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testWithMinVersions() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.TRUE);
@@ -883,7 +883,7 @@ public class TestKeepDeletes {
* Test keeping deleted rows together with min versions set
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testWithTTL() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, KeepDeletedCells.TTL);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
index 86a15ff..bdd3706 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
@@ -69,7 +69,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
col5 = Bytes.toBytes("col5");
}
- @Test
+ @Test (timeout=60000)
public void testSorted() throws IOException{
//Cases that need to be checked are:
//1. The "smallest" KeyValue is in the same scanners as current
@@ -132,7 +132,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
}
- @Test
+ @Test (timeout=60000)
public void testSeek() throws IOException {
//Cases:
//1. Seek KeyValue that is not in scanner
@@ -181,7 +181,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
}
- @Test
+ @Test (timeout=60000)
public void testScannerLeak() throws IOException {
// Test for unclosed scanners (HBASE-1927)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
index df43bd0..c902e59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
@@ -121,7 +121,7 @@ public class TestMajorCompaction {
* right answer in this case - and that it just basically works.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testMajorCompactingToNoOutput() throws IOException {
createStoreFile(r);
for (int i = 0; i < compactionThreshold; i++) {
@@ -156,17 +156,17 @@ public class TestMajorCompaction {
* Assert deletes get cleaned up.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMajorCompaction() throws Exception {
majorCompaction();
}
- @Test
+ @Test (timeout=180000)
public void testDataBlockEncodingInCacheOnly() throws Exception {
majorCompactionWithDataBlockEncoding(true);
}
- @Test
+ @Test (timeout=180000)
public void testDataBlockEncodingEverywhere() throws Exception {
majorCompactionWithDataBlockEncoding(false);
}
@@ -300,7 +300,7 @@ public class TestMajorCompaction {
assertEquals("Should not see anything after TTL has expired", 0, count);
}
- @Test
+ @Test (timeout=180000)
public void testTimeBasedMajorCompaction() throws Exception {
// create 2 storefiles and force a major compaction to reset the time
int delay = 10 * 1000; // 10 sec
@@ -405,7 +405,7 @@ public class TestMajorCompaction {
/**
* Test for HBASE-5920 - Test user requested major compactions always occurring
*/
- @Test
+ @Test (timeout=180000)
public void testNonUserMajorCompactionRequest() throws Exception {
Store store = r.getStore(COLUMN_FAMILY);
createStoreFile(r);
@@ -425,7 +425,7 @@ public class TestMajorCompaction {
/**
* Test for HBASE-5920
*/
- @Test
+ @Test (timeout=180000)
public void testUserMajorCompactionRequest() throws IOException{
Store store = r.getStore(COLUMN_FAMILY);
createStoreFile(r);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
index 4c4c940..59f04eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java
@@ -58,7 +58,7 @@ public class TestMasterAddressTracker {
* but rather acts directly on ZK.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMasterAddressTrackerFromZK() throws Exception {
ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
index 80333e8..543ca4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
@@ -65,7 +65,7 @@ public class TestMemStoreChunkPool {
chunkPool.clearChunks();
}
- @Test
+ @Test (timeout=60000)
public void testReusingChunks() {
Random rand = new Random();
MemStoreLAB mslab = new HeapMemStoreLAB(conf);
@@ -96,7 +96,7 @@ public class TestMemStoreChunkPool {
assertEquals(chunkCount - 1, chunkPool.getPoolSize());
}
- @Test
+ @Test (timeout=60000)
public void testPuttingBackChunksAfterFlushing() throws UnexpectedStateException {
byte[] row = Bytes.toBytes("testrow");
byte[] fam = Bytes.toBytes("testfamily");
@@ -130,7 +130,7 @@ public class TestMemStoreChunkPool {
}
- @Test
+ @Test (timeout=60000)
public void testPuttingBackChunksWithOpeningScanner()
throws UnexpectedStateException {
byte[] row = Bytes.toBytes("testrow");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
index 170bdd4..072cdc4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
@@ -45,7 +45,7 @@ public class TestMemStoreLAB {
/**
* Test a bunch of random allocations
*/
- @Test
+ @Test (timeout=60000)
public void testLABRandomAllocation() {
Random rand = new Random();
MemStoreLAB mslab = new HeapMemStoreLAB();
@@ -69,7 +69,7 @@ public class TestMemStoreLAB {
}
}
- @Test
+ @Test (timeout=60000)
public void testLABLargeAllocation() {
MemStoreLAB mslab = new HeapMemStoreLAB();
ByteRange alloc = mslab.allocateBytes(2*1024*1024);
@@ -81,7 +81,7 @@ public class TestMemStoreLAB {
* Test allocation from lots of threads, making sure the results don't
* overlap in any way
*/
- @Test
+ @Test (timeout=60000)
public void testLABThreading() throws Exception {
Configuration conf = new Configuration();
MultithreadedTestUtil.TestContext ctx =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java
index ddaee3d..4aa82f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java
@@ -31,7 +31,7 @@ public class TestMetricsRegion {
public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
- @Test
+ @Test (timeout=60000)
public void testRegionWrapperMetrics() {
MetricsRegion mr = new MetricsRegion(new MetricsRegionWrapperStub());
MetricsRegionAggregateSource agg = mr.getSource().getAggregateSource();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java
index e777c1d..fc245f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java
@@ -52,7 +52,7 @@ public class TestMetricsRegionServer {
serverSource = rsm.getMetricsSource();
}
- @Test
+ @Test (timeout=60000)
public void testWrapperSource() {
HELPER.assertTag("serverName", "test", serverSource);
HELPER.assertTag("clusterId", "tClusterId", serverSource);
@@ -89,13 +89,13 @@ public class TestMetricsRegionServer {
HELPER.assertCounter("updatesBlockedTime", 419, serverSource);
}
- @Test
+ @Test (timeout=60000)
public void testConstuctor() {
assertNotNull("There should be a hadoop1/hadoop2 metrics source", rsm.getMetricsSource() );
assertNotNull("The RegionServerMetricsWrapper should be accessable", rsm.getRegionServerWrapper());
}
- @Test
+ @Test (timeout=60000)
public void testSlowCount() {
for (int i=0; i < 12; i ++) {
rsm.updateAppend(12);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
index 16f29dc..a2ab608 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
@@ -64,7 +64,7 @@ public class TestMinVersions {
/**
* Verify behavior of getClosestBefore(...)
*/
- @Test
+ @Test (timeout=60000)
public void testGetClosestBefore() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 1, 1000, 1, KeepDeletedCells.FALSE);
@@ -113,7 +113,7 @@ public class TestMinVersions {
* Test mixed memstore and storefile scanning
* with minimum versions.
*/
- @Test
+ @Test (timeout=60000)
public void testStoreMemStore() throws Exception {
// keep 3 versions minimum
HTableDescriptor htd =
@@ -169,7 +169,7 @@ public class TestMinVersions {
/**
* Make sure the Deletes behave as expected with minimum versions
*/
- @Test
+ @Test (timeout=60000)
public void testDelete() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 3, 1000, 1, KeepDeletedCells.FALSE);
@@ -228,7 +228,7 @@ public class TestMinVersions {
/**
* Make sure the memstor behaves correctly with minimum versions
*/
- @Test
+ @Test (timeout=60000)
public void testMemStore() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
@@ -303,7 +303,7 @@ public class TestMinVersions {
/**
* Verify basic minimum versions functionality
*/
- @Test
+ @Test (timeout=60000)
public void testBaseCase() throws Exception {
// 1 version minimum, 1000 versions maximum, ttl = 1s
HTableDescriptor htd =
@@ -396,7 +396,7 @@ public class TestMinVersions {
* Verify that basic filters still behave correctly with
* minimum versions enabled.
*/
- @Test
+ @Test (timeout=60000)
public void testFilters() throws Exception {
HTableDescriptor htd =
hbu.createTableDescriptor(name.getMethodName(), 2, 1000, 1, KeepDeletedCells.FALSE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java
index 15931c6..d59e486 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestMiniBatchOperationInProgress {
- @Test
+ @Test (timeout=60000)
public void testMiniBatchOperationInProgressMethods() {
Pair[] operations = new Pair[10];
OperationStatus[] retCodeDetails = new OperationStatus[10];
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
index 7ac6eef..c70c5a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
@@ -95,13 +95,13 @@ public class TestMinorCompaction {
wal.close();
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteRow() throws Exception {
Delete deleteRow = new Delete(secondRowBytes);
testMinorCompactionWithDelete(deleteRow);
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteColumn1() throws Exception {
Delete dc = new Delete(secondRowBytes);
/* delete all timestamps in the column */
@@ -109,7 +109,7 @@ public class TestMinorCompaction {
testMinorCompactionWithDelete(dc);
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteColumn2() throws Exception {
Delete dc = new Delete(secondRowBytes);
dc.deleteColumn(fam2, col2);
@@ -122,14 +122,14 @@ public class TestMinorCompaction {
testMinorCompactionWithDelete(dc, 3);
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteColumnFamily() throws Exception {
Delete deleteCF = new Delete(secondRowBytes);
deleteCF.deleteFamily(fam2);
testMinorCompactionWithDelete(deleteCF);
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteVersion1() throws Exception {
Delete deleteVersion = new Delete(secondRowBytes);
deleteVersion.deleteColumns(fam2, col2, 2);
@@ -139,7 +139,7 @@ public class TestMinorCompaction {
testMinorCompactionWithDelete(deleteVersion, 1);
}
- @Test
+ @Test (timeout=180000)
public void testMinorCompactionWithDeleteVersion2() throws Exception {
Delete deleteVersion = new Delete(secondRowBytes);
deleteVersion.deleteColumn(fam2, col2, 1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index 9f0b339..d727d70 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -143,7 +143,7 @@ public class TestMultiColumnScanner {
DataBlockEncoding.NONE;
}
- @Test
+ @Test (timeout=180000)
public void testMultiColumnScanner() throws IOException {
HRegion region = TEST_UTIL.createTestRegion(TABLE_NAME,
new HColumnDescriptor(FAMILY)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
index ea668b0..b4ab945 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
@@ -107,7 +107,7 @@ public class TestParallelPut {
/**
* Test one put command.
*/
- @Test
+ @Test (timeout=180000)
public void testPut() throws IOException {
LOG.info("Starting testPut");
this.region = initHRegion(tableName, getName(), fam1);
@@ -124,7 +124,7 @@ public class TestParallelPut {
/**
* Test multi-threaded Puts.
*/
- @Test
+ @Test (timeout=180000)
public void testParallelPuts() throws IOException {
LOG.info("Starting testParallelPuts");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
index dc18408..5867b08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
@@ -64,7 +64,7 @@ public class TestPriorityRpc {
priority = regionServer.rpcServices.getPriority();
}
- @Test
+ @Test (timeout=180000)
public void testQosFunctionForMeta() throws IOException {
priority = regionServer.rpcServices.getPriority();
RequestHeader.Builder headerBuilder = RequestHeader.newBuilder();
@@ -98,7 +98,7 @@ public class TestPriorityRpc {
assertEquals(HConstants.SYSTEMTABLE_QOS, priority.getPriority(header, getRequest));
}
- @Test
+ @Test (timeout=180000)
public void testQosFunctionWithoutKnownArgument() throws IOException {
//The request is not using any of the
//known argument classes (it uses one random request class)
@@ -111,7 +111,7 @@ public class TestPriorityRpc {
assertEquals(HConstants.NORMAL_QOS, qosFunc.getPriority(header, null));
}
- @Test
+ @Test (timeout=180000)
public void testQosFunctionForScanMethod() throws IOException {
RequestHeader.Builder headerBuilder = RequestHeader.newBuilder();
headerBuilder.setMethodName("Scan");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java
index 2b2ecda..e60b0a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQosFunction.java
@@ -38,7 +38,7 @@ import com.google.protobuf.Message;
*/
@Category({RegionServerTests.class, SmallTests.class})
public class TestQosFunction {
- @Test
+ @Test (timeout=60000)
public void testPriority() {
Configuration conf = HBaseConfiguration.create();
RSRpcServices rpcServices = Mockito.mock(RSRpcServices.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
index 6476288..aea9351 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
@@ -129,7 +129,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
}
- @Test
+ @Test (timeout=60000)
public void testMatch_ExplicitColumns()
throws IOException {
//Moving up from the Tracker by using Gets and List instead
@@ -147,7 +147,7 @@ public class TestQueryMatcher extends HBaseTestCase {
_testMatch_ExplicitColumns(scan, expected);
}
- @Test
+ @Test (timeout=60000)
public void testMatch_ExplicitColumnsWithLookAhead()
throws IOException {
//Moving up from the Tracker by using Gets and List instead
@@ -168,7 +168,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
- @Test
+ @Test (timeout=60000)
public void testMatch_Wildcard()
throws IOException {
//Moving up from the Tracker by using Gets and List instead
@@ -224,7 +224,7 @@ public class TestQueryMatcher extends HBaseTestCase {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testMatch_ExpiredExplicit()
throws IOException {
@@ -279,7 +279,7 @@ public class TestQueryMatcher extends HBaseTestCase {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testMatch_ExpiredWildcard()
throws IOException {
@@ -325,7 +325,7 @@ public class TestQueryMatcher extends HBaseTestCase {
}
}
- @Test
+ @Test (timeout=60000)
public void testMatch_PartialRangeDropDeletes() throws Exception {
// Some ranges.
testDropDeletes(
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
index 732df4b..cb4525f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
@@ -100,12 +100,12 @@ public class TestRSStatusServlet {
Mockito.doReturn(rms).when(rs).getRegionServerMetrics();
}
- @Test
+ @Test (timeout=60000)
public void testBasic() throws IOException, ServiceException {
new RSStatusTmpl().render(new StringWriter(), rs);
}
- @Test
+ @Test (timeout=60000)
public void testWithRegions() throws IOException, ServiceException {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("mytable"));
List regions = Lists.newArrayList(
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
index 8e7fe04..7225428 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
@@ -85,7 +85,7 @@ public class TestRegionFavoredNodes {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testFavoredNodes() throws Exception {
Assume.assumeTrue(createWithFavoredNode != null);
// Get the addresses of the datanodes in the cluster.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
index 8bcd89e..e940399 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
@@ -114,7 +114,7 @@ public class TestRegionMergeTransaction {
* {@link #region_b}
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testPrepare() throws IOException {
prepareOnGoodRegions();
}
@@ -134,7 +134,7 @@ public class TestRegionMergeTransaction {
/**
* Test merging the same region
*/
- @Test
+ @Test (timeout=60000)
public void testPrepareWithSameRegion() throws IOException {
RegionMergeTransaction mt = new RegionMergeTransaction(this.region_a,
this.region_a, true);
@@ -145,7 +145,7 @@ public class TestRegionMergeTransaction {
/**
* Test merging two not adjacent regions under a common merge
*/
- @Test
+ @Test (timeout=60000)
public void testPrepareWithRegionsNotAdjacent() throws IOException {
RegionMergeTransaction mt = new RegionMergeTransaction(this.region_a,
this.region_c, false);
@@ -156,7 +156,7 @@ public class TestRegionMergeTransaction {
/**
* Test merging two not adjacent regions under a compulsory merge
*/
- @Test
+ @Test (timeout=60000)
public void testPrepareWithRegionsNotAdjacentUnderCompulsory()
throws IOException {
RegionMergeTransaction mt = new RegionMergeTransaction(region_a, region_c,
@@ -173,7 +173,7 @@ public class TestRegionMergeTransaction {
/**
* Pass a reference store
*/
- @Test
+ @Test (timeout=60000)
public void testPrepareWithRegionsWithReference() throws IOException {
HStore storeMock = Mockito.mock(HStore.class);
when(storeMock.hasReferences()).thenReturn(true);
@@ -187,7 +187,7 @@ public class TestRegionMergeTransaction {
mt.prepare(null));
}
- @Test
+ @Test (timeout=60000)
public void testPrepareWithClosedRegion() throws IOException {
this.region_a.close();
RegionMergeTransaction mt = new RegionMergeTransaction(this.region_a,
@@ -199,7 +199,7 @@ public class TestRegionMergeTransaction {
* Test merging regions which are merged regions and has reference in hbase:meta all
* the same
*/
- @Test
+ @Test (timeout=60000)
public void testPrepareWithRegionsWithMergeReference() throws IOException {
RegionMergeTransaction mt = new RegionMergeTransaction(region_a, region_b,
false);
@@ -211,7 +211,7 @@ public class TestRegionMergeTransaction {
assertFalse(spyMT.prepare(null));
}
- @Test
+ @Test (timeout=60000)
public void testWholesomeMerge() throws IOException, InterruptedException {
final int rowCountOfRegionA = loadRegion(this.region_a, CF, true);
final int rowCountOfRegionB = loadRegion(this.region_b, CF, true);
@@ -255,7 +255,7 @@ public class TestRegionMergeTransaction {
assertTrue(!this.region_b.lock.writeLock().isHeldByCurrentThread());
}
- @Test
+ @Test (timeout=60000)
public void testRollback() throws IOException, InterruptedException {
final int rowCountOfRegionA = loadRegion(this.region_a, CF, true);
final int rowCountOfRegionB = loadRegion(this.region_b, CF, true);
@@ -315,7 +315,7 @@ public class TestRegionMergeTransaction {
assertTrue(!this.region_b.lock.writeLock().isHeldByCurrentThread());
}
- @Test
+ @Test (timeout=60000)
public void testFailAfterPONR() throws IOException, KeeperException, InterruptedException {
final int rowCountOfRegionA = loadRegion(this.region_a, CF, true);
final int rowCountOfRegionB = loadRegion(this.region_b, CF, true);
@@ -355,7 +355,7 @@ public class TestRegionMergeTransaction {
assertTrue(TEST_UTIL.getTestFileSystem().exists(mergedRegionDir));
}
- @Test
+ @Test (timeout=60000)
public void testMeregedRegionBoundary() {
TableName tableName =
TableName.valueOf("testMeregedRegionBoundary");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index f4b6f02..e1224ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -121,7 +121,7 @@ public class TestRegionMergeTransactionOnCluster {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testWholesomeMerge() throws Exception {
LOG.info("Starting testWholesomeMerge");
final TableName tableName =
@@ -172,7 +172,7 @@ public class TestRegionMergeTransactionOnCluster {
* Not really restarting the master. Simulate it by clear of new region
* state since it is not persisted, will be lost after master restarts.
*/
- @Test
+ @Test (timeout=300000)
public void testMergeAndRestartingMaster() throws Exception {
LOG.info("Starting testMergeAndRestartingMaster");
final TableName tableName = TableName.valueOf("testMergeAndRestartingMaster");
@@ -194,7 +194,7 @@ public class TestRegionMergeTransactionOnCluster {
}
@SuppressWarnings("deprecation")
- @Test
+ @Test (timeout=300000)
public void testCleanMergeReference() throws Exception {
LOG.info("Starting testCleanMergeReference");
admin.enableCatalogJanitor(false);
@@ -275,7 +275,7 @@ public class TestRegionMergeTransactionOnCluster {
* They are in one test case so that we don't have to create
* many tables, and these tests are simple.
*/
- @Test
+ @Test (timeout=300000)
public void testMerge() throws Exception {
LOG.info("Starting testMerge");
final TableName tableName = TableName.valueOf("testMerge");
@@ -322,7 +322,7 @@ public class TestRegionMergeTransactionOnCluster {
}
}
- @Test
+ @Test (timeout=300000)
public void testMergeWithReplicas() throws Exception {
final TableName tableName = TableName.valueOf("testMergeWithReplicas");
// Create table and load data.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index aa071ef..fa3063c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -94,12 +94,12 @@ public class TestRegionServerMetrics {
metricsHelper.assertGaugeGt(regionMetricsKey, regions, serverSource);
}
- @Test
+ @Test (timeout=180000)
public void testLocalFiles() throws Exception {
metricsHelper.assertGauge("percentFilesLocal", 0, serverSource);
}
- @Test
+ @Test (timeout=180000)
public void testRequestCount() throws Exception {
String tableNameString = "testRequestCount";
TableName tName = TableName.valueOf(tableNameString);
@@ -183,7 +183,7 @@ public class TestRegionServerMetrics {
table.close();
}
- @Test
+ @Test (timeout=180000)
public void testMutationsWithoutWal() throws Exception {
TableName tableName = TableName.valueOf("testMutationsWithoutWal");
byte[] cf = Bytes.toBytes("d");
@@ -209,7 +209,7 @@ public class TestRegionServerMetrics {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testStoreCount() throws Exception {
TableName tableName = TableName.valueOf("testStoreCount");
byte[] cf = Bytes.toBytes("d");
@@ -235,7 +235,7 @@ public class TestRegionServerMetrics {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testCheckAndPutCount() throws Exception {
String tableNameString = "testCheckAndPutCount";
TableName tableName = TableName.valueOf(tableNameString);
@@ -266,7 +266,7 @@ public class TestRegionServerMetrics {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testIncrement() throws Exception {
String tableNameString = "testIncrement";
TableName tableName = TableName.valueOf(tableNameString);
@@ -293,7 +293,7 @@ public class TestRegionServerMetrics {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testAppend() throws Exception {
String tableNameString = "testAppend";
TableName tableName = TableName.valueOf(tableNameString);
@@ -320,7 +320,7 @@ public class TestRegionServerMetrics {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testScanNext() throws IOException {
String tableNameString = "testScanNext";
TableName tableName = TableName.valueOf(tableNameString);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index 65aed5b..51aa134 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -271,7 +271,7 @@ public class TestRegionServerNoMaster {
* Tests an on-the-fly RPC that was scheduled for the earlier RS on the same port
* for openRegion. The region server should reject this RPC. (HBASE-9721)
*/
- @Test
+ @Test (timeout=180000)
public void testOpenCloseRegionRPCIntendedForPreviousServer() throws Exception {
Assert.assertTrue(getRS().getRegion(regionName).isAvailable());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
index c58e9c6..415ac20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java
@@ -85,7 +85,7 @@ public class TestRegionServerOnlineConfigChange {
* Check if the number of compaction threads changes online
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testNumCompactionThreadsOnlineChange() throws IOException {
assertTrue(rs1.compactSplitThread != null);
int newNumSmallThreads =
@@ -111,7 +111,7 @@ public class TestRegionServerOnlineConfigChange {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testCompactionConfigurationOnlineChange() throws IOException {
String strPrefix = "hbase.hstore.compaction.";
Store s = r1.getStore(COLUMN_FAMILY1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
index 924a196..c25b195 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
@@ -63,7 +63,7 @@ public class TestRegionSplitPolicy {
Mockito.doReturn(stores).when(mockRegion).getStores();
}
- @Test
+ @Test (timeout=60000)
public void testIncreasingToUpperBoundRegionSplitPolicy() throws IOException {
// Configure IncreasingToUpperBoundRegionSplitPolicy as our split policy
conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
@@ -122,7 +122,7 @@ public class TestRegionSplitPolicy {
assertEquals(maxSplitSize, policy.getSizeToCheck(0));
}
- @Test
+ @Test (timeout=60000)
public void testCreateDefault() throws IOException {
conf.setLong(HConstants.HREGION_MAX_FILESIZE, 1234L);
@@ -143,7 +143,7 @@ public class TestRegionSplitPolicy {
/**
* Test setting up a customized split policy
*/
- @Test
+ @Test (timeout=60000)
public void testCustomPolicy() throws IOException {
HTableDescriptor myHtd = new HTableDescriptor();
myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
@@ -175,7 +175,7 @@ public class TestRegionSplitPolicy {
assertEquals("ef", Bytes.toString(policy.getSplitPoint()));
}
- @Test
+ @Test (timeout=60000)
public void testConstantSizePolicy() throws IOException {
htd.setMaxFileSize(1024L);
ConstantSizeRegionSplitPolicy policy =
@@ -220,7 +220,7 @@ public class TestRegionSplitPolicy {
stores.clear();
}
- @Test
+ @Test (timeout=60000)
public void testGetSplitPoint() throws IOException {
ConstantSizeRegionSplitPolicy policy =
(ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
@@ -252,7 +252,7 @@ public class TestRegionSplitPolicy {
Bytes.toString(policy.getSplitPoint()));
}
- @Test
+ @Test (timeout=60000)
public void testDelimitedKeyPrefixRegionSplitPolicy() throws IOException {
HTableDescriptor myHtd = new HTableDescriptor();
myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
index dd7ef29..9bf107b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
@@ -39,7 +39,7 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestResettingCounters {
- @Test
+ @Test (timeout=60000)
public void testResettingCounters() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
index 9cb35ca..7335ab7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
@@ -84,7 +84,7 @@ public class TestReversibleScanners {
private static final int VALUESIZE = 3;
private static byte[][] VALUES = makeN(VALUE, VALUESIZE);
- @Test
+ @Test (timeout=180000)
public void testReversibleStoreFileScanner() throws IOException {
FileSystem fs = TEST_UTIL.getTestFileSystem();
Path hfilePath = new Path(new Path(
@@ -119,7 +119,7 @@ public class TestReversibleScanners {
}
- @Test
+ @Test (timeout=180000)
public void testReversibleMemstoreScanner() throws IOException {
MemStore memstore = new DefaultMemStore();
writeMemstore(memstore);
@@ -133,7 +133,7 @@ public class TestReversibleScanners {
}
- @Test
+ @Test (timeout=180000)
public void testReversibleKeyValueHeap() throws IOException {
// write data to one memstore and two store files
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -223,7 +223,7 @@ public class TestReversibleScanners {
}
}
- @Test
+ @Test (timeout=180000)
public void testReversibleStoreScanner() throws IOException {
// write data to one memstore and two store files
FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -306,7 +306,7 @@ public class TestReversibleScanners {
}
}
- @Test
+ @Test (timeout=180000)
public void testReversibleRegionScanner() throws IOException {
byte[] FAMILYNAME2 = Bytes.toBytes("testCf2");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testtable"))
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
index c6c9f50..fee50d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
@@ -148,7 +148,7 @@ public class TestSCVFWithMiniCluster {
/**
* Test the filter by adding all columns of family A in the scan. (OK)
*/
- @Test
+ @Test (timeout=180000)
public void scanWithAllQualifiersOfFamiliyA() throws IOException {
/* Given */
Scan scan = new Scan();
@@ -162,7 +162,7 @@ public class TestSCVFWithMiniCluster {
* Test the filter by adding all columns of family A and B in the scan. (KO: row '3' without
* 'a:foo' qualifier is returned)
*/
- @Test
+ @Test (timeout=180000)
public void scanWithAllQualifiersOfBothFamilies() throws IOException {
/* When */
Scan scan = new Scan();
@@ -175,7 +175,7 @@ public class TestSCVFWithMiniCluster {
* Test the filter by adding 2 columns of family A and 1 column of family B in the scan. (KO: row
* '3' without 'a:foo' qualifier is returned)
*/
- @Test
+ @Test (timeout=180000)
public void scanWithSpecificQualifiers1() throws IOException {
/* When */
Scan scan = new Scan();
@@ -192,7 +192,7 @@ public class TestSCVFWithMiniCluster {
* Test the filter by adding 1 column of family A (the one used in the filter) and 1 column of
* family B in the scan. (OK)
*/
- @Test
+ @Test (timeout=180000)
public void scanWithSpecificQualifiers2() throws IOException {
/* When */
Scan scan = new Scan();
@@ -206,7 +206,7 @@ public class TestSCVFWithMiniCluster {
/**
* Test the filter by adding 2 columns of family A in the scan. (OK)
*/
- @Test
+ @Test (timeout=180000)
public void scanWithSpecificQualifiers3() throws IOException {
/* When */
Scan scan = new Scan();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java
index 2854832..9159994 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java
@@ -42,7 +42,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
sdt = new ScanDeleteTracker();
}
- @Test
+ @Test (timeout=60000)
public void testDeletedBy_Delete() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.Delete);
@@ -51,7 +51,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.VERSION_DELETED, ret);
}
- @Test
+ @Test (timeout=60000)
public void testDeletedBy_DeleteColumn() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteColumn);
@@ -63,7 +63,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.COLUMN_DELETED, ret);
}
- @Test
+ @Test (timeout=60000)
public void testDeletedBy_DeleteFamily() {
KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
Bytes.toBytes("qualifier"), timestamp, KeyValue.Type.DeleteFamily);
@@ -75,7 +75,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(DeleteResult.FAMILY_DELETED, ret);
}
- @Test
+ @Test (timeout=60000)
public void testDeletedBy_DeleteFamilyVersion() {
byte [] qualifier1 = Bytes.toBytes("qualifier1");
byte [] qualifier2 = Bytes.toBytes("qualifier2");
@@ -120,7 +120,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
}
- @Test
+ @Test (timeout=60000)
public void testDelete_DeleteColumn() {
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();
@@ -142,7 +142,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
}
- @Test
+ @Test (timeout=60000)
public void testDeleteColumn_Delete() {
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.DeleteColumn.getCode();
@@ -163,7 +163,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
//Testing new way where we save the Delete in case of a Delete for specific
//ts, could have just added the last line to the first test, but rather keep
//them separated
- @Test
+ @Test (timeout=60000)
public void testDelete_KeepDelete(){
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();
@@ -174,7 +174,7 @@ public class TestScanDeleteTracker extends HBaseTestCase {
assertEquals(false ,sdt.isEmpty());
}
- @Test
+ @Test (timeout=60000)
public void testDelete_KeepVersionZero(){
byte [] qualifier = Bytes.toBytes("qualifier");
deleteType = KeyValue.Type.Delete.getCode();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
index 9163e61..d5ad796 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
@@ -101,7 +101,7 @@ public class TestScanWithBloomError {
fs = FileSystem.get(conf);
}
- @Test
+ @Test (timeout=60000)
public void testThreeStoreFiles() throws IOException {
region = TEST_UTIL.createTestRegion(TABLE_NAME,
new HColumnDescriptor(FAMILY)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 72f556e..df2a190 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -122,7 +122,7 @@ public class TestScanner {
* Test basic stop row filter works.
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testStopRow() throws Exception {
byte [] startrow = Bytes.toBytes("bbb");
byte [] stoprow = Bytes.toBytes("ccc");
@@ -197,7 +197,7 @@ public class TestScanner {
s.close();
}
- @Test
+ @Test (timeout=60000)
public void testFilters() throws IOException {
try {
this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
@@ -224,7 +224,7 @@ public class TestScanner {
* NPEs but instead a UnknownScannerException. HBASE-2503
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testRaceBetweenClientAndTimeout() throws Exception {
try {
this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
@@ -249,7 +249,7 @@ public class TestScanner {
/** The test!
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testScanner() throws IOException {
try {
r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
@@ -462,7 +462,7 @@ public class TestScanner {
* HBase-910.
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testScanAndSyncFlush() throws Exception {
this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HRegionIncommon hri = new HRegionIncommon(r);
@@ -485,7 +485,7 @@ public class TestScanner {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testScanAndRealConcurrentFlush() throws Exception {
this.r = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HRegionIncommon hri = new HRegionIncommon(r);
@@ -508,7 +508,7 @@ public class TestScanner {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("deprecation")
public void testScanAndConcurrentMajorCompact() throws Exception {
HTableDescriptor htd = TEST_UTIL.createTableDescriptor(name.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
index 86515a6..ac6528c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
@@ -69,7 +69,7 @@ public class TestScannerWithBulkload {
admin.createTable(desc);
}
- @Test
+ @Test (timeout=180000)
public void testBulkLoad() throws Exception {
TableName tableName = TableName.valueOf("testBulkLoad");
long l = System.currentTimeMillis();
@@ -193,7 +193,7 @@ public class TestScannerWithBulkload {
return table;
}
- @Test
+ @Test (timeout=180000)
public void testBulkLoadWithParallelScan() throws Exception {
TableName tableName = TableName.valueOf("testBulkLoadWithParallelScan");
final long l = System.currentTimeMillis();
@@ -234,7 +234,7 @@ public class TestScannerWithBulkload {
}
- @Test
+ @Test (timeout=180000)
public void testBulkLoadNativeHFile() throws Exception {
TableName tableName = TableName.valueOf("testBulkLoadNativeHFile");
long l = System.currentTimeMillis();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index ffc76f3..39ab223 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -138,7 +138,7 @@ public class TestSeekOptimizations {
expectedKVs.clear();
}
- @Test
+ @Test (timeout=180000)
public void testMultipleTimestampRanges() throws IOException {
// enable seek counting
StoreFileScanner.instrument();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index 5ce4456..d3647bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -181,7 +181,7 @@ public class TestServerCustomProtocol {
util.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testSingleProxy() throws Throwable {
Table table = util.getConnection().getTable(TEST_TABLE);
Map results = ping(table, null, null);
@@ -304,7 +304,7 @@ public class TestServerCustomProtocol {
});
}
- @Test
+ @Test (timeout=180000)
public void testSingleMethod() throws Throwable {
try (HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
RegionLocator locator = table.getRegionLocator();
@@ -333,7 +333,7 @@ public class TestServerCustomProtocol {
}
}
- @Test
+ @Test (timeout=180000)
public void testRowRange() throws Throwable {
try (HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
RegionLocator locator = table.getRegionLocator();
@@ -413,7 +413,7 @@ public class TestServerCustomProtocol {
return rpcCallback.get().getPong();
}
- @Test
+ @Test (timeout=180000)
public void testCompoundCall() throws Throwable {
try (HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
RegionLocator locator = table.getRegionLocator();
@@ -424,7 +424,7 @@ public class TestServerCustomProtocol {
}
}
- @Test
+ @Test (timeout=180000)
public void testNullCall() throws Throwable {
try(HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
RegionLocator locator = table.getRegionLocator();
@@ -435,7 +435,7 @@ public class TestServerCustomProtocol {
}
}
- @Test
+ @Test (timeout=180000)
public void testNullReturn() throws Throwable {
try (HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
RegionLocator locator = table.getRegionLocator();
@@ -446,7 +446,7 @@ public class TestServerCustomProtocol {
}
}
- @Test
+ @Test (timeout=180000)
public void testEmptyReturnType() throws Throwable {
try (HTable table = (HTable) util.getConnection().getTable(TEST_TABLE)) {
Map results = noop(table, ROW_A, ROW_C);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
index 5efc12c..edae41d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
@@ -45,7 +45,7 @@ import org.mockito.stubbing.Answer;
@Category({RegionServerTests.class, SmallTests.class})
public class TestServerNonceManager {
- @Test
+ @Test (timeout=60000)
public void testNormalStartEnd() throws Exception {
final long[] numbers = new long[] { NO_NONCE, 1, 2, Long.MAX_VALUE, Long.MIN_VALUE };
ServerNonceManager nm = createManager();
@@ -75,7 +75,7 @@ public class TestServerNonceManager {
}
}
- @Test
+ @Test (timeout=60000)
public void testNoEndWithoutStart() {
ServerNonceManager nm = createManager();
try {
@@ -84,7 +84,7 @@ public class TestServerNonceManager {
} catch (AssertionError err) {}
}
- @Test
+ @Test (timeout=60000)
public void testCleanup() throws Exception {
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
@@ -117,7 +117,7 @@ public class TestServerNonceManager {
}
}
- @Test
+ @Test (timeout=60000)
public void testWalNonces() throws Exception {
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
@@ -149,7 +149,7 @@ public class TestServerNonceManager {
}
}
- @Test
+ @Test (timeout=60000)
public void testConcurrentAttempts() throws Exception {
final ServerNonceManager nm = createManager();
@@ -176,7 +176,7 @@ public class TestServerNonceManager {
tr.propagateError();
}
- @Test
+ @Test (timeout=60000)
public void testStopWaiting() throws Exception {
final ServerNonceManager nm = createManager();
nm.setConflictWaitIterationMs(1);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index 9c9fa6f..87ce412 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -255,7 +255,7 @@ public class TestSplitTransaction {
assertTrue(!this.parent.lock.writeLock().isHeldByCurrentThread());
}
- @Test
+ @Test (timeout=60000)
public void testCountReferencesFailsSplit() throws IOException {
final int rowcount = TEST_UTIL.loadRegion(this.parent, CF);
assertTrue(rowcount > 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 06f9eb8..7a52df4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -672,7 +672,7 @@ public class TestSplitTransactionOnCluster {
}
}
- @Test
+ @Test (timeout=300000)
public void testSplitWithRegionReplicas() throws Exception {
final TableName tableName =
TableName.valueOf("foobar");
@@ -1034,7 +1034,7 @@ public class TestSplitTransactionOnCluster {
}
}
- @Test
+ @Test (timeout=300000)
public void testStoreFileReferenceCreationWhenSplitPolicySaysToSkipRangeCheck()
throws Exception {
final TableName tableName =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 59793e0..db1bbbe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -196,7 +196,7 @@ public class TestStore {
* Part of HBase-10466
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testFlushSizeAccounting() throws Exception {
LOG.info("Setting up a faulty file system that cannot write in " +
this.name.getMethodName());
@@ -255,7 +255,7 @@ public class TestStore {
* Verify that compression and data block encoding are respected by the
* Store.createWriterInTmp() method, used on store flush.
*/
- @Test
+ @Test (timeout=180000)
public void testCreateWriter() throws Exception {
Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
@@ -281,7 +281,7 @@ public class TestStore {
reader.close();
}
- @Test
+ @Test (timeout=180000)
public void testDeleteExpiredStoreFiles() throws Exception {
testDeleteExpiredStoreFiles(0);
testDeleteExpiredStoreFiles(1);
@@ -354,7 +354,7 @@ public class TestStore {
assertTrue(ts < (edge.currentTime() - storeTtl));
}
- @Test
+ @Test (timeout=180000)
public void testLowestModificationTime() throws Exception {
Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
@@ -413,7 +413,7 @@ public class TestStore {
* Test for hbase-1686.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testEmptyStoreFile() throws IOException {
init(this.name.getMethodName());
// Write a store file.
@@ -450,7 +450,7 @@ public class TestStore {
* Getting data from memstore only
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testGet_FromMemStoreOnly() throws IOException {
init(this.name.getMethodName());
@@ -474,7 +474,7 @@ public class TestStore {
* Getting data from files only
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testGet_FromFilesOnly() throws IOException {
init(this.name.getMethodName());
@@ -513,7 +513,7 @@ public class TestStore {
* Getting data from memstore and files
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testGet_FromMemStoreAndFiles() throws IOException {
init(this.name.getMethodName());
@@ -564,7 +564,7 @@ public class TestStore {
/*
* test the internal details of how ICV works, especially during a flush scenario.
*/
- @Test
+ @Test (timeout=180000)
public void testIncrementColumnValue_ICVDuringFlush()
throws IOException, InterruptedException {
init(this.name.getMethodName());
@@ -618,7 +618,7 @@ public class TestStore {
EnvironmentEdgeManagerTestHelper.reset();
}
- @Test
+ @Test (timeout=180000)
public void testICV_negMemstoreSize() throws IOException {
init(this.name.getMethodName());
@@ -675,7 +675,7 @@ public class TestStore {
Assert.assertEquals(computedSize, size);
}
- @Test
+ @Test (timeout=180000)
public void testIncrementColumnValue_SnapshotFlushCombo() throws Exception {
ManualEnvironmentEdge mee = new ManualEnvironmentEdge();
EnvironmentEdgeManagerTestHelper.injectEdge(mee);
@@ -743,7 +743,7 @@ public class TestStore {
Assert.assertEquals(oldValue, Bytes.toLong(CellUtil.cloneValue(results.get(1))));
}
- @Test
+ @Test (timeout=180000)
public void testHandleErrorsInFlush() throws Exception {
LOG.info("Setting up a faulty file system that cannot write");
@@ -887,7 +887,7 @@ public class TestStore {
* Test to ensure correctness when using Stores with multiple timestamps
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testMultipleTimestamps() throws IOException {
int numRows = 1;
long[] timestamps1 = new long[] {1,5,10,20};
@@ -942,7 +942,7 @@ public class TestStore {
*
* @throws IOException When the IO operations fail.
*/
- @Test
+ @Test (timeout=180000)
public void testSplitWithEmptyColFam() throws IOException {
init(this.name.getMethodName());
Assert.assertNull(store.getSplitPoint());
@@ -951,7 +951,7 @@ public class TestStore {
store.getHRegion().clearSplit();
}
- @Test
+ @Test (timeout=180000)
public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception {
final String CONFIG_KEY = "hbase.regionserver.thread.compaction.throttle";
long anyValue = 10;
@@ -992,7 +992,7 @@ public class TestStore {
}
}
- @Test
+ @Test (timeout=180000)
public void testStoreUsesSearchEngineOverride() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DummyStoreEngine.class.getName());
@@ -1028,7 +1028,7 @@ public class TestStore {
store.getRegionFileSystem().removeStoreFiles(store.getColumnFamilyName(), Lists.newArrayList(sf));
}
- @Test
+ @Test (timeout=180000)
public void testRefreshStoreFiles() throws Exception {
init(name.getMethodName());
@@ -1075,7 +1075,7 @@ public class TestStore {
}
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=180000)
public void testRefreshStoreFilesNotChanged() throws IOException {
init(name.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index e5a5022..69bb2b3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -96,7 +96,7 @@ public class TestStoreFile extends HBaseTestCase {
* using two HalfMapFiles.
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testBasicHalfMapFile() throws Exception {
final HRegionInfo hri =
new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
@@ -149,7 +149,7 @@ public class TestStoreFile extends HBaseTestCase {
* store files in other regions works.
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testReference() throws IOException {
final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
@@ -193,7 +193,7 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(Bytes.equals(kv.getRow(), finalRow));
}
- @Test
+ @Test (timeout=60000)
public void testHFileLink() throws IOException {
final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
@@ -236,7 +236,7 @@ public class TestStoreFile extends HBaseTestCase {
* This test creates an hfile and then the dir structures and files to verify that references
* to hfilelinks (created by snapshot clones) can be properly interpreted.
*/
- @Test
+ @Test (timeout=60000)
public void testReferenceToHFileLink() throws IOException {
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
Configuration testConf = new Configuration(this.conf);
@@ -501,7 +501,7 @@ public class TestStoreFile extends HBaseTestCase {
private static final int BLOCKSIZE_SMALL = 8192;
- @Test
+ @Test (timeout=60000)
public void testBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@@ -522,7 +522,7 @@ public class TestStoreFile extends HBaseTestCase {
bloomWriteRead(writer, fs);
}
- @Test
+ @Test (timeout=60000)
public void testDeleteFamilyBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
@@ -585,7 +585,7 @@ public class TestStoreFile extends HBaseTestCase {
/**
* Test for HBASE-8012
*/
- @Test
+ @Test (timeout=60000)
public void testReseek() throws Exception {
// write the file
Path f = new Path(ROOT_DIR, getName());
@@ -610,7 +610,7 @@ public class TestStoreFile extends HBaseTestCase {
assertNotNull("Intial reseek should position at the beginning of the file", s.peek());
}
- @Test
+ @Test (timeout=60000)
public void testBloomTypes() throws Exception {
float err = (float) 0.01;
FileSystem fs = FileSystem.getLocal(conf);
@@ -699,7 +699,7 @@ public class TestStoreFile extends HBaseTestCase {
}
}
- @Test
+ @Test (timeout=60000)
public void testSeqIdComparator() {
assertOrdering(StoreFile.Comparators.SEQ_ID,
mockStoreFile(true, 100, 1000, -1, "/foo/123"),
@@ -778,7 +778,7 @@ public class TestStoreFile extends HBaseTestCase {
* Test to ensure correctness when using StoreFile with multiple timestamps
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testMultipleTimestamps() throws IOException {
byte[] family = Bytes.toBytes("familyname");
byte[] qualifier = Bytes.toBytes("qualifier");
@@ -829,7 +829,7 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(!scanner.shouldUseScanner(scan, columns, Long.MIN_VALUE));
}
- @Test
+ @Test (timeout=60000)
public void testCacheOnWriteEvictOnClose() throws Exception {
Configuration conf = this.conf;
@@ -1002,7 +1002,7 @@ public class TestStoreFile extends HBaseTestCase {
* Check if data block encoding information is saved correctly in HFile's
* file info.
*/
- @Test
+ @Test (timeout=60000)
public void testDataBlockEncodingMetaData() throws IOException {
// Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java
index da39f59..5a05c07 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java
@@ -36,7 +36,7 @@ public class TestStoreFileInfo extends HBaseTestCase {
/**
* Validate that we can handle valid tables with '.', '_', and '-' chars.
*/
- @Test
+ @Test (timeout=60000)
public void testStoreFileNames() {
String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
"MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index 1bcb7c9..89606e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -59,7 +59,7 @@ public class TestStoreFileScannerWithTagCompression {
fs = FileSystem.get(conf);
}
- @Test
+ @Test (timeout=60000)
public void testReseek() throws Exception {
// write the file
Path f = new Path(ROOT_DIR, "testReseek");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
index 86d670c..0bb5941 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java
@@ -79,7 +79,7 @@ public class TestStripeCompactor {
return TestStripeCompactor.a();
}
- @Test
+ @Test (timeout=60000)
public void testBoundaryCompactions() throws Exception {
// General verification
verifyBoundaryCompaction(a(KV_A, KV_A, KV_B, KV_B, KV_C, KV_D),
@@ -88,7 +88,7 @@ public class TestStripeCompactor {
verifyBoundaryCompaction(a(KV_B, KV_C), a(KEY_B, KEY_D), new KeyValue[][] { a(KV_B, KV_C) });
}
- @Test
+ @Test (timeout=60000)
public void testBoundaryCompactionEmptyFiles() throws Exception {
// No empty file if there're already files.
verifyBoundaryCompaction(
@@ -131,7 +131,7 @@ public class TestStripeCompactor {
}
}
- @Test
+ @Test (timeout=60000)
public void testSizeCompactions() throws Exception {
// General verification with different sizes.
verifySizeCompaction(a(KV_A, KV_A, KV_B, KV_C, KV_D), 3, 2, OPEN_KEY, OPEN_KEY,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
index 32ab164..2151228 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestStripeStoreEngine {
- @Test
+ @Test (timeout=60000)
public void testCreateBasedOnConfig() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, TestStoreEngine.class.getName());
@@ -63,7 +63,7 @@ public class TestStripeStoreEngine {
}
}
- @Test
+ @Test (timeout=60000)
public void testCompactionContextForceSelect() throws Exception {
Configuration conf = HBaseConfiguration.create();
int targetCount = 2;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
index 48f93e0..a30c403 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
@@ -83,7 +83,7 @@ public class TestStripeStoreFileManager {
}
}
- @Test
+ @Test (timeout=60000)
public void testInsertFilesIntoL0() throws Exception {
StripeStoreFileManager manager = createManager();
MockStoreFile sf = createFile();
@@ -100,7 +100,7 @@ public class TestStripeStoreFileManager {
assertTrue(manager.getFilesForScanOrGet(true, KEY_C, KEY_C).contains(sf));
}
- @Test
+ @Test (timeout=60000)
public void testClearFiles() throws Exception {
StripeStoreFileManager manager = createManager();
manager.insertNewFiles(al(createFile()));
@@ -120,7 +120,7 @@ public class TestStripeStoreFileManager {
return result;
}
- @Test
+ @Test (timeout=60000)
public void testRowKeyBefore() throws Exception {
StripeStoreFileManager manager = createManager();
StoreFile l0File = createFile(), l0File2 = createFile();
@@ -169,7 +169,7 @@ public class TestStripeStoreFileManager {
assertEquals(stripe0a, sfs.next());
}
- @Test
+ @Test (timeout=60000)
public void testGetSplitPointEdgeCases() throws Exception {
StripeStoreFileManager manager = createManager();
// No files => no split.
@@ -193,7 +193,7 @@ public class TestStripeStoreFileManager {
assertEquals(sf6.splitPoint, manager.getSplitPoint());
}
- @Test
+ @Test (timeout=60000)
public void testGetStripeBoundarySplits() throws Exception {
/* First number - split must be after this stripe; further numbers - stripes */
verifySplitPointScenario(5, false, 0f, 2, 1, 1, 1, 1, 1, 10);
@@ -203,7 +203,7 @@ public class TestStripeStoreFileManager {
verifySplitPointScenario(2, false, 0f, 5, 2, 5, 5, 5);
}
- @Test
+ @Test (timeout=60000)
public void testGetUnbalancedSplits() throws Exception {
/* First number - split must be inside/after this stripe; further numbers - stripes */
verifySplitPointScenario(0, false, 2.1f, 4, 4, 4); // 8/4 is less than 2.1f
@@ -255,7 +255,7 @@ public class TestStripeStoreFileManager {
return Arrays.copyOf(key, key.length + 1);
}
- @Test
+ @Test (timeout=60000)
public void testGetFilesForGetAndScan() throws Exception {
StripeStoreFileManager manager = createManager();
verifyGetAndScanScenario(manager, null, null);
@@ -294,7 +294,7 @@ public class TestStripeStoreFileManager {
verifyGetOrScanScenario(manager, false, start, end, results);
}
- @Test
+ @Test (timeout=60000)
@SuppressWarnings("unchecked")
public void testLoadFilesWithRecoverableBadFiles() throws Exception {
// In L0, there will be file w/o metadata (real L0, 3 files with invalid metadata, and 3
@@ -317,7 +317,7 @@ public class TestStripeStoreFileManager {
verifyAllFiles(manager, allFilesToGo);
}
- @Test
+ @Test (timeout=60000)
public void testLoadFilesWithBadStripe() throws Exception {
// Current "algorithm" will see the after-B key before C key, add it as valid stripe,
// and then fail all other stripes. So everything would end up in L0.
@@ -329,7 +329,7 @@ public class TestStripeStoreFileManager {
assertEquals(allFilesToGo.size(), manager.getLevel0Files().size());
}
- @Test
+ @Test (timeout=60000)
public void testLoadFilesWithGaps() throws Exception {
// Stripes must not have gaps. If they do, everything goes to L0.
StripeStoreFileManager manager =
@@ -341,7 +341,7 @@ public class TestStripeStoreFileManager {
assertEquals(1, manager.getStorefileCount());
}
- @Test
+ @Test (timeout=60000)
public void testLoadFilesAfterSplit() throws Exception {
// If stripes are good but have non-open ends, they must be treated as open ends.
MockStoreFile sf = createFile(KEY_B, KEY_C);
@@ -359,7 +359,7 @@ public class TestStripeStoreFileManager {
manager.addCompactionResults(al(sf), al(createFile(OPEN_KEY, OPEN_KEY)));
}
- @Test
+ @Test (timeout=60000)
public void testAddingCompactionResults() throws Exception {
StripeStoreFileManager manager = createManager();
// First, add some L0 files and "compact" one with new stripe creation.
@@ -435,7 +435,7 @@ public class TestStripeStoreFileManager {
verifyAllFiles(manager, al(sf_i2i_9));
}
- @Test
+ @Test (timeout=60000)
public void testCompactionAndFlushConflict() throws Exception {
// Add file flush into stripes
StripeStoreFileManager sfm = createManager();
@@ -461,7 +461,7 @@ public class TestStripeStoreFileManager {
verifyGetAndScanScenario(sfm, KEY_C, KEY_C, sf_i2d, sf_i2c_2);
}
- @Test
+ @Test (timeout=60000)
public void testEmptyResultsForStripes() throws Exception {
// Test that we can compact L0 into a subset of stripes.
StripeStoreFileManager manager = createManager();
@@ -479,7 +479,7 @@ public class TestStripeStoreFileManager {
verifyAllFiles(manager, compacted);
}
- @Test
+ @Test (timeout=60000)
public void testPriority() throws Exception {
// Expected priority, file limit, stripe count, files per stripe, l0 files.
testPriorityScenario(5, 5, 0, 0, 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 14c6ca9..843803a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -96,7 +96,7 @@ public class TestTags {
useFilter = false;
}
- @Test
+ @Test (timeout=180000)
public void testTags() throws Exception {
Table table = null;
try {
@@ -162,7 +162,7 @@ public class TestTags {
}
}
- @Test
+ @Test (timeout=180000)
public void testFlushAndCompactionWithoutTags() throws Exception {
Table table = null;
try {
@@ -252,7 +252,7 @@ public class TestTags {
}
}
- @Test
+ @Test (timeout=180000)
public void testFlushAndCompactionwithCombinations() throws Exception {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
byte[] fam = Bytes.toBytes("info");
@@ -373,7 +373,7 @@ public class TestTags {
}
}
- @Test
+ @Test (timeout=180000)
public void testTagsWithAppendAndIncrement() throws Exception {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
byte[] f = Bytes.toBytes("f");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
index edec023..1ef36bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimeRangeTracker.java
@@ -27,7 +27,7 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestTimeRangeTracker {
- @Test
+ @Test (timeout=60000)
public void testAlwaysDecrementingSetsMaximum() {
TimeRangeTracker trr = new TimeRangeTracker();
trr.includeTimestamp(3);
@@ -37,7 +37,7 @@ public class TestTimeRangeTracker {
assertTrue(trr.getMaximumTimestamp() != -1 /*The initial max value*/);
}
- @Test
+ @Test (timeout=60000)
public void testSimpleInRange() {
TimeRangeTracker trr = new TimeRangeTracker();
trr.includeTimestamp(0);
@@ -51,7 +51,7 @@ public class TestTimeRangeTracker {
* of the thread index; max is 10 * 10k and min is 0.
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=60000)
public void testArriveAtRightAnswer() throws InterruptedException {
final TimeRangeTracker trr = new TimeRangeTracker();
final int threadCount = 10;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index ba05e9a..00f3ca5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -87,7 +87,7 @@ public class TestWideScanner extends HBaseTestCase {
return count;
}
- @Test
+ @Test (timeout=60000)
public void testWideScanBatching() throws IOException {
final int batch = 256;
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
index 8d0d5a8..f27fffa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
@@ -162,7 +162,7 @@ public class TestCompactionWithThroughputController {
}
}
- @Test
+ @Test (timeout=180000)
public void testCompaction() throws Exception {
long limitTime = testCompactionWithThroughputLimit();
long noLimitTime = testCompactionWithoutThroughputLimit();
@@ -176,7 +176,7 @@ public class TestCompactionWithThroughputController {
/**
* Test the tuning task of {@link PressureAwareCompactionThroughputController}
*/
- @Test
+ @Test (timeout=180000)
public void testThroughputTuning() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DefaultStoreEngine.class.getName());
@@ -241,7 +241,7 @@ public class TestCompactionWithThroughputController {
/**
* Test the logic that we calculate compaction pressure for a striped store.
*/
- @Test
+ @Test (timeout=180000)
public void testGetCompactionPressureForStripedStore() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, StripeStoreEngine.class.getName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestOffPeakHours.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestOffPeakHours.java
index f43c29a..fad5604 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestOffPeakHours.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestOffPeakHours.java
@@ -53,14 +53,14 @@ public class TestOffPeakHours {
conf = testUtil.getConfiguration();
}
- @Test
+ @Test (timeout=60000)
public void testWithoutSettings() {
Configuration conf = testUtil.getConfiguration();
OffPeakHours target = OffPeakHours.getInstance(conf);
assertFalse(target.isOffPeakHour(hourOfDay));
}
- @Test
+ @Test (timeout=60000)
public void testSetPeakHourToTargetTime() {
conf.setLong(CompactionConfiguration.HBASE_HSTORE_OFFPEAK_START_HOUR, hourMinusOne);
conf.setLong(CompactionConfiguration.HBASE_HSTORE_OFFPEAK_END_HOUR, hourPlusOne);
@@ -68,7 +68,7 @@ public class TestOffPeakHours {
assertTrue(target.isOffPeakHour(hourOfDay));
}
- @Test
+ @Test (timeout=60000)
public void testSetPeakHourOutsideCurrentSelection() {
conf.setLong(CompactionConfiguration.HBASE_HSTORE_OFFPEAK_START_HOUR, hourMinusTwo);
conf.setLong(CompactionConfiguration.HBASE_HSTORE_OFFPEAK_END_HOUR, hourMinusOne);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
index f3b7be4..314353a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java
@@ -96,7 +96,7 @@ public class TestStripeCompactionPolicy {
private final static int defaultInitialCount = 1;
private static long defaultTtl = 1000 * 1000;
- @Test
+ @Test (timeout=60000)
public void testNoStripesFromFlush() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setBoolean(StripeStoreConfig.FLUSH_TO_L0_KEY, true);
@@ -108,7 +108,7 @@ public class TestStripeCompactionPolicy {
verifyFlush(policy, si, input, expected, null);
}
- @Test
+ @Test (timeout=60000)
public void testOldStripesFromFlush() throws Exception {
StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create());
StripeInformationProvider si = createStripes(0, KEY_C, KEY_D);
@@ -119,7 +119,7 @@ public class TestStripeCompactionPolicy {
verifyFlush(policy, si, input, expected, new byte[][] { OPEN_KEY, KEY_C, KEY_D, OPEN_KEY });
}
- @Test
+ @Test (timeout=60000)
public void testNewStripesFromFlush() throws Exception {
StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create());
StripeInformationProvider si = createStripesL0Only(0, 0);
@@ -129,7 +129,7 @@ public class TestStripeCompactionPolicy {
verifyFlush(policy, si, input, expected, new byte[][] { OPEN_KEY, OPEN_KEY });
}
- @Test
+ @Test (timeout=60000)
public void testSingleStripeCompaction() throws Exception {
// Create a special policy that only compacts single stripes, using standard methods.
Configuration conf = HBaseConfiguration.create();
@@ -191,7 +191,7 @@ public class TestStripeCompactionPolicy {
verifyCompaction(policy, si, sfs, null, 1, null, si.getStartRow(1), si.getEndRow(1), true);
}
- @Test
+ @Test (timeout=60000)
public void testWithParallelCompaction() throws Exception {
// TODO: currently only one compaction at a time per store is allowed. If this changes,
// the appropriate file exclusion testing would need to be done in respective tests.
@@ -199,7 +199,7 @@ public class TestStripeCompactionPolicy {
mock(StripeInformationProvider.class), al(createFile()), false));
}
- @Test
+ @Test (timeout=60000)
public void testWithReferences() throws Exception {
StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create());
StripeCompactor sc = mock(StripeCompactor.class);
@@ -218,7 +218,7 @@ public class TestStripeCompactionPolicy {
any(NoLimitCompactionThroughputController.class));
}
- @Test
+ @Test (timeout=60000)
public void testInitialCountFromL0() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 2);
@@ -232,7 +232,7 @@ public class TestStripeCompactionPolicy {
verifyCompaction(policy, si, si.getStorefiles(), true, 6, 5L, OPEN_KEY, OPEN_KEY, true);
}
- @Test
+ @Test (timeout=60000)
public void testExistingStripesFromL0() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 3);
@@ -241,7 +241,7 @@ public class TestStripeCompactionPolicy {
createPolicy(conf), si, si.getLevel0Files(), null, null, si.getStripeBoundaries());
}
- @Test
+ @Test (timeout=60000)
public void testNothingToCompactFromL0() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 4);
@@ -253,7 +253,7 @@ public class TestStripeCompactionPolicy {
verifyNoCompaction(policy, si);
}
- @Test
+ @Test (timeout=60000)
public void testSplitOffStripe() throws Exception {
Configuration conf = HBaseConfiguration.create();
// Test depends on this not being set to pass. Default breaks test. TODO: Revisit.
@@ -284,7 +284,7 @@ public class TestStripeCompactionPolicy {
verifySingleStripeCompaction(specPolicy, si, 1, null);
}
- @Test
+ @Test (timeout=60000)
public void testSplitOffStripeOffPeak() throws Exception {
// for HBASE-11439
Configuration conf = HBaseConfiguration.create();
@@ -304,7 +304,7 @@ public class TestStripeCompactionPolicy {
.size());
}
- @Test
+ @Test (timeout=60000)
public void testSplitOffStripeDropDeletes() throws Exception {
Configuration conf = HBaseConfiguration.create();
conf.setInt(StripeStoreConfig.MIN_FILES_KEY, 2);
@@ -323,7 +323,7 @@ public class TestStripeCompactionPolicy {
}
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testMergeExpiredFiles() throws Exception {
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
long now = defaultTtl + 2;
@@ -362,7 +362,7 @@ public class TestStripeCompactionPolicy {
}
@SuppressWarnings("unchecked")
- @Test
+ @Test (timeout=60000)
public void testMergeExpiredStripes() throws Exception {
// HBASE-11397
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
@@ -399,7 +399,7 @@ public class TestStripeCompactionPolicy {
Lists.newArrayList(stripeFiles), new ArrayList());
}
- @Test
+ @Test (timeout=60000)
public void testSingleStripeDropDeletes() throws Exception {
Configuration conf = HBaseConfiguration.create();
// Test depends on this not being set to pass. Default breaks test. TODO: Revisit.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java
index 03baf48..a94299d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java
@@ -45,7 +45,7 @@ public class TestCompressor {
public static void setUpBeforeClass() throws Exception {
}
- @Test
+ @Test (timeout=60000)
public void testToShort() {
short s = 1;
assertEquals(s, Compressor.toShort((byte)0, (byte)1));
@@ -58,7 +58,7 @@ public class TestCompressor {
Compressor.toShort((byte)0xff, (byte)0xff);
}
- @Test
+ @Test (timeout=60000)
public void testCompressingWithNullDictionaries() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
@@ -72,7 +72,7 @@ public class TestCompressor {
assertTrue(Bytes.equals(blahBytes, product));
}
- @Test
+ @Test (timeout=60000)
public void testCompressingWithClearDictionaries() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java
index 624f2c2..405549c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java
@@ -48,7 +48,7 @@ public class TestCustomWALCellCodec {
* {@link WALCellCodec}
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=60000)
public void testCreatePreparesCodec() throws Exception {
Configuration conf = new Configuration(false);
conf.setClass(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, CustomWALCellCodec.class,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
index 10e7e3d..8b6458d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
@@ -80,7 +80,7 @@ public class TestDurability {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testDurability() throws Exception {
final WALFactory wals = new WALFactory(CONF, null, "TestDurability");
byte[] tableName = Bytes.toBytes("TestDurability");
@@ -137,7 +137,7 @@ public class TestDurability {
verifyWALCount(wals, wal, 12);
}
- @Test
+ @Test (timeout=180000)
public void testIncrement() throws Exception {
byte[] row1 = Bytes.toBytes("row1");
byte[] col1 = Bytes.toBytes("col1");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
index 2c25e3b..b97f56c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java
@@ -137,7 +137,7 @@ public class TestFSHLog {
/**
* A loaded WAL coprocessor won't break existing WAL test cases.
*/
- @Test
+ @Test (timeout=180000)
public void testWALCoprocessorLoaded() throws Exception {
// test to see whether the coprocessor is loaded or not.
FSHLog log = null;
@@ -328,7 +328,7 @@ public class TestFSHLog {
* If a region's entries are larger than min of (oldestFlushing, oldestUnFlushed), then the
* region should be flushed before archiving this WAL.
*/
- @Test
+ @Test (timeout=180000)
public void testAllRegionsFlushed() {
LOG.debug("testAllRegionsFlushed");
Map oldestFlushingSeqNo = new HashMap();
@@ -392,7 +392,7 @@ public class TestFSHLog {
* @throws IOException
* @see HBASE-11109
*/
- @Test
+ @Test (timeout=180000)
public void testFlushSequenceIdIsGreaterThanAllEditsInHFile() throws IOException {
String testName = "testFlushSequenceIdIsGreaterThanAllEditsInHFile";
final TableName tableName = TableName.valueOf(testName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java
index 0450904..d80dce1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java
@@ -42,7 +42,7 @@ public class TestKeyValueCompression {
private static final byte[] VALUE = Bytes.toBytes("fake value");
private static final int BUF_SIZE = 256*1024;
- @Test
+ @Test (timeout=60000)
public void testCountingKVs() throws Exception {
List kvs = Lists.newArrayList();
for (int i = 0; i < 400; i++) {
@@ -55,7 +55,7 @@ public class TestKeyValueCompression {
runTestCycle(kvs);
}
- @Test
+ @Test (timeout=60000)
public void testRepeatingKVs() throws Exception {
List kvs = Lists.newArrayList();
for (int i = 0; i < 400; i++) {
@@ -84,7 +84,7 @@ public class TestKeyValueCompression {
}
}
- @Test
+ @Test (timeout=60000)
public void testKVWithTags() throws Exception {
CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);
DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
index 4e07040..94175e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
@@ -123,7 +123,7 @@ public class TestLogRollAbort {
* Tests that RegionServer aborts if we hit an error closing the WAL when
* there are unsynced WAL edits. See HBASE-4282.
*/
- @Test
+ @Test (timeout=180000)
public void testRSAbortWithUnflushedEdits() throws Exception {
LOG.info("Starting testRSAbortWithUnflushedEdits()");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
index 5216ee5..850f1f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
@@ -72,7 +72,7 @@ public class TestLogRollPeriod {
/**
* Tests that the LogRoller perform the roll even if there are no edits
*/
- @Test
+ @Test (timeout=180000)
public void testNoEdits() throws Exception {
TableName tableName = TableName.valueOf("TestLogRollPeriodNoEdits");
TEST_UTIL.createTable(tableName, "cf");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index 111acf3..9c4ba0a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -213,7 +213,7 @@ public class TestLogRolling {
* @throws IOException
* @throws org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException
*/
- @Test
+ @Test (timeout=300000)
public void testLogRolling() throws Exception {
this.tableName = getName();
// TODO: Why does this write data take for ever?
@@ -296,7 +296,7 @@ public class TestLogRolling {
* Tests that logs are rolled upon detecting datanode death
* Requires an HDFS jar with HDFS-826 & syncFs() support (HDFS-200)
*/
- @Test
+ @Test (timeout=300000)
public void testLogRollOnDatanodeDeath() throws Exception {
TEST_UTIL.ensureSomeRegionServersAvailable(2);
assertTrue("This test requires WAL file replication set to 2.",
@@ -412,7 +412,7 @@ public class TestLogRolling {
* restarted.
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testLogRollOnPipelineRestart() throws Exception {
LOG.info("Starting testLogRollOnPipelineRestart");
assertTrue("This test requires WAL file replication.",
@@ -561,7 +561,7 @@ public class TestLogRolling {
* Tests that logs are deleted when some region has a compaction
* record in WAL and no other records. See HBASE-8597.
*/
- @Test
+ @Test (timeout=300000)
public void testCompactionRecordDoesntBlockRolling() throws Exception {
Table table = null;
Table table2 = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
index 41e05ae..de8d2c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
@@ -58,7 +58,7 @@ public class TestLogRollingNoCluster {
* @throws IOException
* @throws InterruptedException
*/
- @Test
+ @Test (timeout=60000)
public void testContendedLogRolling() throws IOException, InterruptedException {
FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
Path dir = TEST_UTIL.getDataTestDir();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMetricsWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMetricsWAL.java
index d9183d0..6f097ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMetricsWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMetricsWAL.java
@@ -32,7 +32,7 @@ import static org.mockito.Mockito.verify;
@Category({MiscTests.class, SmallTests.class})
public class TestMetricsWAL {
- @Test
+ @Test (timeout=60000)
public void testLogRollRequested() throws Exception {
MetricsWALSource source = mock(MetricsWALSourceImpl.class);
MetricsWAL metricsWAL = new MetricsWAL(source);
@@ -45,7 +45,7 @@ public class TestMetricsWAL {
verify(source, times(1)).incrementLowReplicationLogRoll();
}
- @Test
+ @Test (timeout=60000)
public void testPostSync() throws Exception {
long nanos = TimeUnit.MILLISECONDS.toNanos(145);
MetricsWALSource source = mock(MetricsWALSourceImpl.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java
index 04cb2ce..807497e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java
@@ -122,7 +122,7 @@ public class TestProtobufLog {
* Reads the WAL with and without WALTrailer.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testWALTrailer() throws IOException {
// read With trailer.
doRead(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
index b256651..b94b4e9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
@@ -83,7 +83,7 @@ public class TestReadOldRootAndMetaEdits {
* and last waledit is for the hbase:meta table, which will be linked to the new system:meta table.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testReadOldRootAndMetaEdits() throws IOException {
LOG.debug("testReadOldRootAndMetaEdits");
// kv list to be used for all WALEdits.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
index 25c83a8..0a7a858 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
@@ -81,7 +81,7 @@ public class TestWALActionsListener {
* should end up with 10 rolled files (plus the roll called in
* the constructor). Also test adding a listener while it's running.
*/
- @Test
+ @Test (timeout=60000)
public void testActionListener() throws Exception {
DummyWALActionsListener observer = new DummyWALActionsListener();
List list = new ArrayList();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
index 501fdda..a3fc460 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
@@ -42,12 +42,12 @@ import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestWALCellCodecWithCompression {
- @Test
+ @Test (timeout=60000)
public void testEncodeDecodeKVsWithTags() throws Exception {
doTest(false);
}
- @Test
+ @Test (timeout=60000)
public void testEncodeDecodeKVsWithTagsWithTagsCompression() throws Exception {
doTest(true);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index bb634d1..f529fed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -175,7 +175,7 @@ public class TestWALReplay {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testReplayEditsAfterRegionMovedWithMultiCF() throws Exception {
final TableName tableName =
TableName.valueOf("testReplayEditsAfterRegionMovedWithMultiCF");
@@ -274,7 +274,7 @@ public class TestWALReplay {
* @throws Exception
* @see HBASE-2727
*/
- @Test
+ @Test (timeout=180000)
public void test2727() throws Exception {
// Test being able to have > 1 set of edits in the recovered.edits directory.
// Ensure edits are replayed properly.
@@ -336,7 +336,7 @@ public class TestWALReplay {
* @throws IllegalArgumentException
* @throws SecurityException
*/
- @Test
+ @Test (timeout=180000)
public void testRegionMadeOfBulkLoadedFilesOnly()
throws IOException, SecurityException, IllegalArgumentException,
NoSuchFieldException, IllegalAccessException, InterruptedException {
@@ -402,7 +402,7 @@ public class TestWALReplay {
* @throws IllegalArgumentException
* @throws SecurityException
*/
- @Test
+ @Test (timeout=180000)
public void testCompactedBulkLoadedFiles()
throws IOException, SecurityException, IllegalArgumentException,
NoSuchFieldException, IllegalAccessException, InterruptedException {
@@ -472,7 +472,7 @@ public class TestWALReplay {
* @throws IllegalArgumentException
* @throws SecurityException
*/
- @Test
+ @Test (timeout=180000)
public void testReplayEditsWrittenViaHRegion()
throws IOException, SecurityException, IllegalArgumentException,
NoSuchFieldException, IllegalAccessException, InterruptedException {
@@ -581,7 +581,7 @@ public class TestWALReplay {
* @throws IllegalArgumentException
* @throws SecurityException
*/
- @Test
+ @Test (timeout=180000)
public void testReplayEditsAfterPartialFlush()
throws IOException, SecurityException, IllegalArgumentException,
NoSuchFieldException, IllegalAccessException, InterruptedException {
@@ -667,7 +667,7 @@ public class TestWALReplay {
* and flush again, at last verify the data.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testReplayEditsAfterAbortingFlush() throws IOException {
final TableName tableName =
TableName.valueOf("testReplayEditsAfterAbortingFlush");
@@ -764,7 +764,7 @@ public class TestWALReplay {
* good edits
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testReplayEditsWrittenIntoWAL() throws Exception {
final TableName tableName =
TableName.valueOf("testReplayEditsWrittenIntoWAL");
@@ -859,7 +859,7 @@ public class TestWALReplay {
});
}
- @Test
+ @Test (timeout=180000)
// the following test is for HBASE-6065
public void testSequentialEditLogSeqNum() throws IOException {
final TableName tableName = TableName.valueOf(currentTest.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index 169feba..81f93ec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -179,7 +179,7 @@ public class TestPerTableCFReplication {
utility1.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testParseTableCFsFromConfig() {
Map<String, List<String>> tabCFsMap = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
index aac966e..df039e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
@@ -76,7 +76,7 @@ public class TestReplicationSource {
* time reading logs that are being archived.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testLogMoving() throws Exception{
Path logPath = new Path(logDir, "log");
if (!FS.exists(logDir)) FS.mkdirs(logDir);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
index fff6c9d..e96b36b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java
@@ -108,18 +108,18 @@ public class TestReplicationStateZKImpl extends TestReplicationStateBasic {
utility.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testIsPeerPath_PathToParentOfPeerNode() {
assertFalse(rqZK.isPeerPath(rqZK.peersZNode));
}
- @Test
+ @Test (timeout=180000)
public void testIsPeerPath_PathToChildOfPeerNode() {
String peerChild = ZKUtil.joinZNode(ZKUtil.joinZNode(rqZK.peersZNode, "1"), "child");
assertFalse(rqZK.isPeerPath(peerChild));
}
- @Test
+ @Test (timeout=180000)
public void testIsPeerPath_ActualPeerPath() {
String peerPath = ZKUtil.joinZNode(rqZK.peersZNode, "1");
assertTrue(rqZK.isPeerPath(peerPath));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
index a082b19..43a0c96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java
@@ -108,7 +108,7 @@ public class TestReplicationTrackerZKImpl {
utility.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testGetListOfRegionServers() throws Exception {
// 0 region servers
assertEquals(0, rt.getListOfRegionServers().size());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
index 3710fd6..fee449d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
@@ -52,7 +52,7 @@ public class TestReplicationWALEntryFilters {
static byte[] c = new byte[] {'c'};
static byte[] d = new byte[] {'d'};
- @Test
+ @Test (timeout=60000)
public void testSystemTableWALEntryFilter() {
SystemTableWALEntryFilter filter = new SystemTableWALEntryFilter();
@@ -76,7 +76,7 @@ public class TestReplicationWALEntryFilters {
assertEquals(userEntry, filter.filter(userEntry));
}
- @Test
+ @Test (timeout=60000)
public void testScopeWALEntryFilter() {
ScopeWALEntryFilter filter = new ScopeWALEntryFilter();
@@ -153,7 +153,7 @@ public class TestReplicationWALEntryFilters {
}
};
- @Test
+ @Test (timeout=60000)
public void testChainWALEntryFilter() {
Entry userEntry = createEntry(a, b, c);
@@ -201,7 +201,7 @@ public class TestReplicationWALEntryFilters {
assertEquals(null, filter.filter(userEntry));
}
- @Test
+ @Test (timeout=60000)
public void testTableCfWALEntryFilter() {
ReplicationPeer peer = mock(ReplicationPeer.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
index 7ca12f0..76d8193 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
@@ -112,7 +112,7 @@ public class TestRegionReplicaReplicationEndpoint {
}
@Ignore("To be fixed before 1.0")
- @Test
+ @Test (timeout=180000)
public void testRegionReplicaReplicationPeerIsCreated() throws IOException, ReplicationException {
// create a table with region replicas. Check whether the replication peer is created
// and replication started.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
index 2326301..001d145 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
@@ -157,7 +157,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
}
}
- @Test
+ @Test (timeout=180000)
public void testReplayCallable() throws Exception {
// tests replaying the edits to a secondary region replica using the Callable directly
openRegion(HTU, rs0, hriSecondary);
@@ -197,7 +197,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
}
}
- @Test
+ @Test (timeout=180000)
public void testReplayCallableWithRegionMove() throws Exception {
// tests replaying the edits to a secondary region replica using the Callable directly while
// the region is moved to another location.It tests handling of RME.
@@ -232,7 +232,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
connection.close();
}
- @Test
+ @Test (timeout=180000)
public void testRegionReplicaReplicationEndpointReplicate() throws Exception {
// tests replaying the edits to a secondary region replica using the RRRE.replicate()
openRegion(HTU, rs0, hriSecondary);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
index b87e7ef..3a822da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
@@ -127,7 +127,7 @@ public class TestReplicationSink {
* Insert a whole batch of entries
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testBatchSink() throws Exception {
List entries = new ArrayList(BATCH_SIZE);
List cells = new ArrayList();
@@ -144,7 +144,7 @@ public class TestReplicationSink {
* Insert a mix of puts and deletes
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMixedPutDelete() throws Exception {
List entries = new ArrayList(BATCH_SIZE/2);
List cells = new ArrayList();
@@ -170,7 +170,7 @@ public class TestReplicationSink {
* Insert to 2 different tables
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMixedPutTables() throws Exception {
List entries = new ArrayList(BATCH_SIZE/2);
List cells = new ArrayList();
@@ -191,7 +191,7 @@ public class TestReplicationSink {
* Insert then do different types of deletes
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMixedDeletes() throws Exception {
List entries = new ArrayList(3);
List cells = new ArrayList();
@@ -217,7 +217,7 @@ public class TestReplicationSink {
* before the actual Put that creates it.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testApplyDeleteBeforePut() throws Exception {
List entries = new ArrayList(5);
List<Cell> cells = new ArrayList<Cell>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java
index a2ea258..d3170a2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java
@@ -55,7 +55,7 @@ public class TestReplicationSinkManager {
PEER_CLUSTER_ID, replicationEndpoint, new Configuration());
}
- @Test
+ @Test (timeout=60000)
public void testChooseSinks() {
List serverNames = Lists.newArrayList();
for (int i = 0; i < 20; i++) {
@@ -71,7 +71,7 @@ public class TestReplicationSinkManager {
}
- @Test
+ @Test (timeout=60000)
public void testChooseSinks_LessThanRatioAvailable() {
List serverNames = Lists.newArrayList(mock(ServerName.class),
mock(ServerName.class));
@@ -84,7 +84,7 @@ public class TestReplicationSinkManager {
assertEquals(1, sinkManager.getSinks().size());
}
- @Test
+ @Test (timeout=60000)
public void testReportBadSink() {
ServerName serverNameA = mock(ServerName.class);
ServerName serverNameB = mock(ServerName.class);
@@ -108,7 +108,7 @@ public class TestReplicationSinkManager {
* Once a SinkPeer has been reported as bad more than BAD_SINK_THRESHOLD times, it should not
* be replicated to anymore.
*/
- @Test
+ @Test (timeout=60000)
public void testReportBadSink_PastThreshold() {
List serverNames = Lists.newArrayList();
for (int i = 0; i < 20; i++) {
@@ -135,7 +135,7 @@ public class TestReplicationSinkManager {
assertEquals(1, sinkManager.getSinks().size());
}
- @Test
+ @Test (timeout=60000)
public void testReportBadSink_DownToZeroSinks() {
List serverNames = Lists.newArrayList();
for (int i = 0; i < 20; i++) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
index 854d4c0..3e4503a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
@@ -186,7 +186,7 @@ public class TestReplicationSourceManager {
setUp();
}
- @Test
+ @Test (timeout=180000)
public void testLogRoll() throws Exception {
long seq = 0;
long baseline = 1000;
@@ -245,7 +245,7 @@ public class TestReplicationSourceManager {
// TODO Need a case with only 2 WALs and we only want to delete the first one
}
- @Test
+ @Test (timeout=180000)
public void testClaimQueues() throws Exception {
LOG.debug("testNodeFailoverWorkerCopyQueuesFromRSUsingMulti");
conf.setBoolean(HConstants.ZOOKEEPER_USEMULTI, true);
@@ -288,7 +288,7 @@ public class TestReplicationSourceManager {
server.abort("", null);
}
- @Test
+ @Test (timeout=180000)
public void testCleanupFailoverQueues() throws Exception {
final Server server = new DummyServer("hostname1.example.org");
ReplicationQueues rq =
@@ -322,7 +322,7 @@ public class TestReplicationSourceManager {
assertEquals(Sets.newHashSet("log2"), manager.getWalsByIdRecoveredQueues().get(id));
}
- @Test
+ @Test (timeout=180000)
public void testNodeFailoverDeadServerParsing() throws Exception {
LOG.debug("testNodeFailoverDeadServerParsing");
conf.setBoolean(HConstants.ZOOKEEPER_USEMULTI, true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java
index 577f0ba..e23ce1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationWALReaderManager.java
@@ -136,7 +136,7 @@ public class TestReplicationWALReaderManager {
log.close();
}
- @Test
+ @Test (timeout=300000)
public void test() throws Exception {
// Grab the path that was generated when the log rolled as part of its creation
Path path = pathWatcher.currentPath;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 21450a2..3d4a5ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -77,7 +77,7 @@ public class TestHBaseSaslRpcClient {
Logger.getRootLogger().setLevel(Level.DEBUG);
}
- @Test
+ @Test (timeout=60000)
public void testSaslQOPNotEmpty() throws Exception {
Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,
DEFAULT_USER_PASSWORD);
@@ -103,7 +103,7 @@ public class TestHBaseSaslRpcClient {
INTEGRITY.getSaslQop()));
}
- @Test
+ @Test (timeout=60000)
public void testSaslClientCallbackHandler() throws UnsupportedCallbackException {
final Token<? extends TokenIdentifier> token = createTokenMock();
when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
@@ -123,7 +123,7 @@ public class TestHBaseSaslRpcClient {
verify(passwordCallback).setPassword(any(char[].class));
}
- @Test
+ @Test (timeout=60000)
public void testSaslClientCallbackHandlerWithException() {
final Token<? extends TokenIdentifier> token = createTokenMock();
when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
@@ -138,7 +138,7 @@ public class TestHBaseSaslRpcClient {
}
}
- @Test
+ @Test (timeout=60000)
public void testHBaseSaslRpcClientCreation() throws Exception {
//creation kerberos principal check section
assertFalse(assertSuccessCreationKerberosPrincipal(null));
@@ -168,7 +168,7 @@ public class TestHBaseSaslRpcClient {
DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
}
- @Test
+ @Test (timeout=60000)
public void testAuthMethodReadWrite() throws IOException {
DataInputBuffer in = new DataInputBuffer();
DataOutputBuffer out = new DataOutputBuffer();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureRPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureRPC.java
index 8ac38fa..7c537d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureRPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureRPC.java
@@ -97,12 +97,12 @@ public class TestSecureRPC {
TEST_UTIL.cleanupTestDir();
}
- @Test
+ @Test (timeout=60000)
public void testRpc() throws Exception {
testRpcCallWithEnabledKerberosSaslAuth(RpcClientImpl.class);
}
- @Test
+ @Test (timeout=60000)
public void testAsyncRpc() throws Exception {
testRpcCallWithEnabledKerberosSaslAuth(AsyncRpcClient.class);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
index f85832e..91f83ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
@@ -43,7 +43,7 @@ import com.google.common.collect.ImmutableSet;
public class TestUser {
private static Log LOG = LogFactory.getLog(TestUser.class);
- @Test
+ @Test (timeout=60000)
public void testBasicAttributes() throws Exception {
Configuration conf = HBaseConfiguration.create();
User user = User.createUserForTesting(conf, "simple", new String[]{"foo"});
@@ -52,7 +52,7 @@ public class TestUser {
// don't test shortening of kerberos names because regular Hadoop doesn't support them
}
- @Test
+ @Test (timeout=60000)
public void testRunAs() throws Exception {
Configuration conf = HBaseConfiguration.create();
final User user = User.createUserForTesting(conf, "testuser", new String[]{"foo"});
@@ -114,7 +114,7 @@ public class TestUser {
* Previously getCurrent() was returning null if not initialized on
* non-secure Hadoop variants.
*/
- @Test
+ @Test (timeout=60000)
public void testGetCurrent() throws Exception {
User user1 = User.getCurrent();
assertNotNull(user1.ugi);
@@ -129,7 +129,7 @@ public class TestUser {
}
}
- @Test
+ @Test (timeout=60000)
public void testUserGroupNames() throws Exception {
final String username = "testuser";
final ImmutableSet singleGroups = ImmutableSet.of("group");
@@ -154,7 +154,7 @@ public class TestUser {
}
}
- @Test
+ @Test (timeout=60000)
public void testSecurityForNonSecureHadoop() {
assertFalse("Security should be disable in non-secure Hadoop",
User.isSecurityEnabled());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java
index a66c124..df80c1d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUsersOperationsWithSecureHadoop.java
@@ -50,7 +50,7 @@ public class TestUsersOperationsWithSecureHadoop {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=60000)
public void testUserLoginInSecureHadoop() throws Exception {
UserGroupInformation defaultLogin = UserGroupInformation.getLoginUser();
Configuration conf = getConfigurationWoPrincipal();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
index d6a6f03..8aa2b4a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
@@ -92,7 +92,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testQualifierAccess() throws Exception {
final Table table = TEST_UTIL.createTable(TABLE, FAMILY);
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 51436b4..6dbc479 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -290,7 +290,7 @@ public class TestAccessController extends SecureTestUtil {
assertEquals(0, AccessControlLists.getNamespacePermissions(conf, TEST_TABLE.getTableName().getNameAsString()).size());
}
- @Test
+ @Test (timeout=300000)
public void testTableCreate() throws Exception {
AccessTestAction createTable = new AccessTestAction() {
@Override
@@ -309,7 +309,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(createTable, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableModify() throws Exception {
AccessTestAction modifyTable = new AccessTestAction() {
@Override
@@ -327,7 +327,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(modifyTable, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableDelete() throws Exception {
AccessTestAction deleteTable = new AccessTestAction() {
@Override
@@ -342,7 +342,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(deleteTable, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableTruncate() throws Exception {
AccessTestAction truncateTable = new AccessTestAction() {
@Override
@@ -358,7 +358,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(truncateTable, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testAddColumn() throws Exception {
final HColumnDescriptor hcd = new HColumnDescriptor("fam_new");
AccessTestAction action = new AccessTestAction() {
@@ -374,7 +374,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testModifyColumn() throws Exception {
final HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
hcd.setMaxVersions(10);
@@ -391,7 +391,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testDeleteColumn() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -406,7 +406,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableDisable() throws Exception {
AccessTestAction disableTable = new AccessTestAction() {
@Override
@@ -433,7 +433,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(disableAclTable, SUPERUSER, USER_ADMIN, USER_CREATE, USER_OWNER, USER_RW, USER_RO);
}
- @Test
+ @Test (timeout=300000)
public void testTableEnable() throws Exception {
AccessTestAction enableTable = new AccessTestAction() {
@Override
@@ -448,7 +448,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(enableTable, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testMove() throws Exception {
List regions;
try (RegionLocator locator =
@@ -471,7 +471,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testAssign() throws Exception {
List regions;
try (RegionLocator locator =
@@ -492,7 +492,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testUnassign() throws Exception {
List regions;
try (RegionLocator locator =
@@ -513,7 +513,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testRegionOffline() throws Exception {
List regions;
try (RegionLocator locator =
@@ -534,7 +534,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testBalance() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -548,7 +548,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testBalanceSwitch() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -562,7 +562,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testShutdown() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -576,7 +576,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testStopMaster() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -595,7 +595,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_NONE, USER_RO);
}
- @Test
+ @Test (timeout=300000)
public void testSplit() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -609,7 +609,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testSplitWithSplitRow() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -625,7 +625,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testMergeRegions() throws Exception {
final List regions = TEST_UTIL.getHBaseCluster().findRegionsForTable(TEST_TABLE.getTableName());
@@ -644,7 +644,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testFlush() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -658,7 +658,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testCompact() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -673,7 +673,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testPreCompactSelection() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -697,7 +697,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_NONE, USER_RO);
}
- @Test
+ @Test (timeout=300000)
public void testRead() throws Exception {
// get action
AccessTestAction getAction = new AccessTestAction() {
@@ -743,7 +743,7 @@ public class TestAccessController extends SecureTestUtil {
verifyRead(scanAction);
}
- @Test
+ @Test (timeout=300000)
// test put, delete, increment
public void testWrite() throws Exception {
// put action
@@ -798,7 +798,7 @@ public class TestAccessController extends SecureTestUtil {
verifyWrite(incrementAction);
}
- @Test
+ @Test (timeout=300000)
public void testReadWrite() throws Exception {
// action for checkAndDelete
AccessTestAction checkAndDeleteAction = new AccessTestAction() {
@@ -837,7 +837,7 @@ public class TestAccessController extends SecureTestUtil {
verifyReadWrite(checkAndPut);
}
- @Test
+ @Test (timeout=300000)
public void testBulkLoad() throws Exception {
FileSystem fs = TEST_UTIL.getTestFileSystem();
final Path dir = TEST_UTIL.getDataTestDirOnTestFS("testBulkLoad");
@@ -946,7 +946,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=300000)
public void testAppend() throws Exception {
AccessTestAction appendAction = new AccessTestAction() {
@@ -973,7 +973,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(appendAction, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testGrantRevoke() throws Exception {
AccessTestAction grantAction = new AccessTestAction() {
@Override
@@ -1063,7 +1063,7 @@ public class TestAccessController extends SecureTestUtil {
USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testPostGrantRevoke() throws Exception {
final TableName tableName =
TableName.valueOf("TempTable");
@@ -1338,7 +1338,7 @@ public class TestAccessController extends SecureTestUtil {
return perms.contains(userPermission);
}
- @Test
+ @Test (timeout=300000)
public void testPostGrantRevokeAtQualifierLevel() throws Exception {
final TableName tableName =
TableName.valueOf("testGrantRevokeAtQualifierLevel");
@@ -1453,7 +1453,7 @@ public class TestAccessController extends SecureTestUtil {
admin.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testPermissionList() throws Exception {
final TableName tableName =
TableName.valueOf("testPermissionList");
@@ -1586,7 +1586,7 @@ public class TestAccessController extends SecureTestUtil {
admin.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testGlobalPermissionList() throws Exception {
List perms;
Table acl = TEST_UTIL.getConnection().getTable(AccessControlLists.ACL_TABLE_NAME);
@@ -1668,7 +1668,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=300000)
public void testCheckPermissions() throws Exception {
// --------------------------------------
// test global permissions
@@ -1821,7 +1821,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=300000)
public void testStopRegionServer() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -1835,7 +1835,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testRollWALWriterRequest() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -1849,7 +1849,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testOpenRegion() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -1863,7 +1863,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testCloseRegion() throws Exception {
AccessTestAction action = new AccessTestAction() {
@Override
@@ -1877,7 +1877,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testSnapshot() throws Exception {
Admin admin = TEST_UTIL.getHBaseAdmin();
final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE.getTableName());
@@ -1934,7 +1934,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(cloneAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testSnapshotWithOwner() throws Exception {
Admin admin = TEST_UTIL.getHBaseAdmin();
final HTableDescriptor htd = admin.getTableDescriptor(TEST_TABLE.getTableName());
@@ -1990,7 +1990,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(cloneAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testGlobalAuthorizationForNewRegisteredRS() throws Exception {
LOG.debug("Test for global authorization for a new registered RegionServer.");
MiniHBaseCluster hbaseCluster = TEST_UTIL.getHBaseCluster();
@@ -2063,7 +2063,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=300000)
public void testTableDescriptorsEnumeration() throws Exception {
User TABLE_ADMIN = User.createUserForTesting(conf, "UserA", new String[0]);
@@ -2111,7 +2111,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(getTableDescAction, USER_RW, USER_RO, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableNameEnumeration() throws Exception {
AccessTestAction listTablesAction = new AccessTestAction() {
@Override
@@ -2132,7 +2132,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(listTablesAction, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testTableDeletion() throws Exception {
User TABLE_ADMIN = User.createUserForTesting(conf, "TestUser", new String[0]);
@@ -2161,7 +2161,7 @@ public class TestAccessController extends SecureTestUtil {
verifyAllowed(deleteTableAction, TABLE_ADMIN);
}
- @Test
+ @Test (timeout=300000)
public void testNamespaceUserGrant() throws Exception {
AccessTestAction getAction = new AccessTestAction() {
@Override
@@ -2186,7 +2186,7 @@ public class TestAccessController extends SecureTestUtil {
verifyAllowed(getAction, USER_NONE);
}
- @Test
+ @Test (timeout=300000)
public void testAccessControlClientGrantRevoke() throws Exception {
// Create user for testing, who has no READ privileges by default.
User testGrantRevoke = User.createUserForTesting(conf, "testGrantRevoke", new String[0]);
@@ -2227,7 +2227,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(getAction, testGrantRevoke);
}
- @Test
+ @Test (timeout=300000)
public void testAccessControlClientGlobalGrantRevoke() throws Exception {
// Create user for testing, who has no READ privileges by default.
User testGlobalGrantRevoke = User.createUserForTesting(conf,
@@ -2269,7 +2269,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(getAction, testGlobalGrantRevoke);
}
- @Test
+ @Test (timeout=300000)
public void testAccessControlClientGrantRevokeOnNamespace() throws Exception {
// Create user for testing, who has no READ privileges by default.
User testNS = User.createUserForTesting(conf, "testNS", new String[0]);
@@ -2356,7 +2356,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=300000)
public void testCoprocessorExec() throws Exception {
// Set up our ping endpoint service on all regions of our test table
for (JVMClusterUtil.RegionServerThread thread:
@@ -2405,7 +2405,7 @@ public class TestAccessController extends SecureTestUtil {
verifyAllowed(execEndpointAction, userA, userB);
}
- @Test
+ @Test (timeout=300000)
public void testReservedCellTags() throws Exception {
AccessTestAction putWithReservedTag = new AccessTestAction() {
@Override
@@ -2431,7 +2431,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(putWithReservedTag, USER_OWNER, USER_ADMIN, USER_CREATE, USER_RW, USER_RO);
}
- @Test
+ @Test (timeout=300000)
public void testSetQuota() throws Exception {
AccessTestAction setUserQuotaAction = new AccessTestAction() {
@Override
@@ -2494,7 +2494,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(setNamespaceQuotaAction, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
}
- @Test
+ @Test (timeout=300000)
public void testGetNamespacePermission() throws Exception {
String namespace = "testNamespace";
NamespaceDescriptor desc = NamespaceDescriptor.create(namespace).build();
@@ -2511,7 +2511,7 @@ public class TestAccessController extends SecureTestUtil {
TEST_UTIL.getMiniHBaseCluster().getMaster().deleteNamespace(namespace);
}
- @Test
+ @Test (timeout=300000)
public void testTruncatePerms() throws Exception {
try {
List existingPerms = AccessControlClient.getUserPermissions(conf,
@@ -2544,7 +2544,7 @@ public class TestAccessController extends SecureTestUtil {
};
}
- @Test
+ @Test (timeout=300000)
public void testAccessControlClientUserPerms() throws Exception {
// adding default prefix explicitly as it is not included in the table name.
assertEquals(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR,
@@ -2562,7 +2562,7 @@ public class TestAccessController extends SecureTestUtil {
assertEquals(6, perms.size());
}
- @Test
+ @Test (timeout=300000)
public void testAccessControllerUserPermsRegexHandling() throws Exception {
User testRegexHandler = User.createUserForTesting(conf, "testRegexHandling", new String[0]);
@@ -2619,7 +2619,7 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(action, USER_NONE, USER_RO, USER_RW);
}
- @Test
+ @Test (timeout=300000)
public void testPrepareAndCleanBulkLoad() throws Exception {
AccessTestAction prepareBulkLoadAction = new AccessTestAction() {
@Override
@@ -2639,7 +2639,7 @@ public class TestAccessController extends SecureTestUtil {
verifyAnyCreate(cleanupBulkLoadAction);
}
- @Test
+ @Test (timeout=300000)
public void testReplicateLogEntries() throws Exception {
AccessTestAction replicateLogEntriesAction = new AccessTestAction() {
@Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
index f6066ad..001ef7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
@@ -78,7 +78,7 @@ public class TestAccessController2 extends SecureTestUtil {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testCreateWithCorrectOwner() throws Exception {
// Create a test user
final User testUser = User.createUserForTesting(TEST_UTIL.getConfiguration(), "TestUser",
@@ -115,7 +115,7 @@ public class TestAccessController2 extends SecureTestUtil {
assertTrue(perms.get(0).implies(Permission.Action.ADMIN));
}
- @Test
+ @Test (timeout=300000)
public void testACLTableAccess() throws Exception {
final Configuration conf = TEST_UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index e239647..c78ccf1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -147,7 +147,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
Threads.sleep(1000);
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionwithVersions() throws Exception {
// store two sets of values, one store with a cell level ACL, and one
// without
@@ -250,7 +250,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
verifyAllowed(USER_OTHER, get2, 1);
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsWithDeleteMutipleVersions() throws Exception {
// table/column/qualifier level permissions
final byte[] TEST_ROW1 = Bytes.toBytes("r1");
@@ -368,7 +368,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
}
- @Test
+ @Test (timeout=180000)
public void testDeleteWithFutureTimestamp() throws Exception {
// Store two values, one in the future
@@ -451,7 +451,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
verifyDenied(getQ2, USER_OTHER);
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsWithDeleteWithUserTs() throws Exception {
USER_OWNER.runAs(new AccessTestAction() {
@Override
@@ -527,7 +527,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
});
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsWithDeleteExactVersion() throws Exception {
final byte[] TEST_ROW1 = Bytes.toBytes("r1");
final byte[] TEST_Q1 = Bytes.toBytes("q1");
@@ -629,7 +629,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
});
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsForIncrementWithMultipleVersions() throws Exception {
final byte[] TEST_ROW1 = Bytes.toBytes("r1");
final byte[] TEST_Q1 = Bytes.toBytes("q1");
@@ -713,7 +713,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
});
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsForPutWithMultipleVersions() throws Exception {
final byte[] TEST_ROW1 = Bytes.toBytes("r1");
final byte[] TEST_Q1 = Bytes.toBytes("q1");
@@ -802,7 +802,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
});
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissionsForCheckAndDelete() throws Exception {
final byte[] TEST_ROW1 = Bytes.toBytes("r1");
final byte[] ZERO = Bytes.toBytes(0L);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index b7cbc52..4936b68 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -141,7 +141,7 @@ public class TestCellACLs extends SecureTestUtil {
Threads.sleep(1000);
}
- @Test
+ @Test (timeout=180000)
public void testCellPermissions() throws Exception {
// store two sets of values, one store with a cell level ACL, and one without
verifyAllowed(new AccessTestAction() {
@@ -407,7 +407,7 @@ public class TestCellACLs extends SecureTestUtil {
* Insure we are not granting access in the absence of any cells found
* when scanning for covered cells.
*/
- @Test
+ @Test (timeout=180000)
public void testCoveringCheck() throws Exception {
// Grant read access to USER_OTHER
grantOnTable(TEST_UTIL, USER_OTHER.getShortName(), TEST_TABLE.getTableName(),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
index 887b56d..dfd510c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
@@ -154,7 +154,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testAclTableEntries() throws Exception {
String userTestNamespace = "userTestNsp";
Table acl = UTIL.getConnection().getTable(AccessControlLists.ACL_TABLE_NAME);
@@ -194,7 +194,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
}
}
- @Test
+ @Test (timeout=180000)
public void testModifyNamespace() throws Exception {
AccessTestAction modifyNamespace = new AccessTestAction() {
public Object run() throws Exception {
@@ -221,7 +221,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
USER_NS_EXEC);
}
- @Test
+ @Test (timeout=180000)
public void testCreateAndDeleteNamespace() throws Exception {
AccessTestAction createNamespace = new AccessTestAction() {
@Override
@@ -279,7 +279,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
USER_TABLE_WRITE);
}
- @Test
+ @Test (timeout=180000)
public void testGetNamespaceDescriptor() throws Exception {
AccessTestAction getNamespaceAction = new AccessTestAction() {
@Override
@@ -308,7 +308,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
USER_TABLE_WRITE);
}
- @Test
+ @Test (timeout=180000)
public void testListNamespaces() throws Exception {
AccessTestAction listAction = new AccessTestAction() {
@Override
@@ -351,7 +351,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
assertEquals(0, ((List)USER_TABLE_WRITE.runAs(listAction)).size());
}
- @Test
+ @Test (timeout=180000)
public void testGrantRevoke() throws Exception{
final String testUser = "testUser";
@@ -464,7 +464,7 @@ public class TestNamespaceCommands extends SecureTestUtil {
USER_TABLE_WRITE);
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableWithNamespace() throws Exception {
AccessTestAction createTable = new AccessTestAction() {
@Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
index e1dfdbf..1ae6110 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
@@ -145,7 +145,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
assertEquals(0, AccessControlLists.getTablePermissions(conf, TEST_TABLE.getTableName()).size());
}
- @Test
+ @Test (timeout=180000)
public void testEarlyScanTermination() throws Exception {
// Grant USER_OTHER access to TEST_FAMILY1 only
grantOnTable(TEST_UTIL, USER_OTHER.getShortName(), TEST_TABLE.getTableName(), TEST_FAMILY1,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 3456158..2b727c5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -124,7 +124,7 @@ public class TestTablePermissions {
* Test we can read permissions serialized with Writables.
* @throws DeserializationException
*/
- @Test
+ @Test (timeout=300000)
public void testMigration() throws DeserializationException {
Configuration conf = UTIL.getConfiguration();
ListMultimap permissions = createPermissions();
@@ -169,7 +169,7 @@ public class TestTablePermissions {
}
- @Test
+ @Test (timeout=300000)
public void testBasicWrite() throws Exception {
Configuration conf = UTIL.getConfiguration();
// add some permissions
@@ -265,7 +265,7 @@ public class TestTablePermissions {
assertTrue(actions.contains(TablePermission.Action.WRITE));
}
- @Test
+ @Test (timeout=300000)
public void testPersistence() throws Exception {
Configuration conf = UTIL.getConfiguration();
AccessControlLists.addUserPermission(conf,
@@ -305,7 +305,7 @@ public class TestTablePermissions {
checkMultimapEqual(preperms, postperms);
}
- @Test
+ @Test (timeout=300000)
public void testSerialization() throws Exception {
Configuration conf = UTIL.getConfiguration();
ListMultimap permissions = createPermissions();
@@ -346,7 +346,7 @@ public class TestTablePermissions {
}
}
- @Test
+ @Test (timeout=300000)
public void testEquals() throws Exception {
TablePermission p1 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
TablePermission p2 = new TablePermission(TEST_TABLE, null, TablePermission.Action.READ);
@@ -391,7 +391,7 @@ public class TestTablePermissions {
assertFalse(p2.equals(p1));
}
- @Test
+ @Test (timeout=300000)
public void testGlobalPermission() throws Exception {
Configuration conf = UTIL.getConfiguration();
@@ -428,7 +428,7 @@ public class TestTablePermissions {
user3Perms.get(0).getActions());
}
- @Test
+ @Test (timeout=300000)
public void testAuthManager() throws Exception {
Configuration conf = UTIL.getConfiguration();
/* test a race condition causing TableAuthManager to sometimes fail global permissions checks
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
index 9c2bc3c..170e5bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
@@ -86,7 +86,7 @@ public class TestZKPermissionsWatcher {
UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testPermissionsWatcher() throws Exception {
Configuration conf = UTIL.getConfiguration();
User george = User.createUserForTesting(conf, "george", new String[] { });
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
index 9734159..5db2322 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestAuthenticationKey.java
@@ -33,7 +33,7 @@ import org.mockito.Mockito;
@Category({SecurityTests.class, SmallTests.class})
public class TestAuthenticationKey {
- @Test
+ @Test (timeout=60000)
public void test() throws UnsupportedEncodingException {
SecretKey secret = Mockito.mock(SecretKey.class);
Mockito.when(secret.getEncoded()).thenReturn("secret".getBytes("UTF-8"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 3bd20b2..f870f85 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -381,7 +381,7 @@ public class TestTokenAuthentication {
TEST_UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testTokenCreation() throws Exception {
Token token =
secretManager.generateToken("testuser");
@@ -395,7 +395,7 @@ public class TestTokenAuthentication {
Bytes.equals(token.getPassword(), passwd));
}
- @Test
+ @Test (timeout=180000)
public void testTokenAuthentication() throws Exception {
UserGroupInformation testuser =
UserGroupInformation.createUserForTesting("testuser", new String[]{"testgroup"});
@@ -436,7 +436,7 @@ public class TestTokenAuthentication {
});
}
- @Test
+ @Test (timeout=180000)
public void testUseExistingToken() throws Exception {
User user = User.createUserForTesting(TEST_UTIL.getConfiguration(), "testuser2",
new String[]{"testgroup"});
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
index 9552ad3..ff32178 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
@@ -128,7 +128,7 @@ public class TestZKSecretWatcher {
TEST_UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=300000)
public void testKeyUpdate() throws Exception {
// sanity check
assertTrue(KEY_MASTER.isMaster());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
index 061db74..2ceeb65 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
@@ -100,7 +100,7 @@ public class TestDefaultScanLabelGeneratorStack {
});
}
- @Test
+ @Test (timeout=180000)
public void testDefaultScanLabelGeneratorStack() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
index df165bd..384130c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
@@ -95,7 +95,7 @@ public class TestEnforcingScanLabelGenerator {
});
}
- @Test
+ @Test (timeout=180000)
public void testEnforcingScanLabelGenerator() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java
index e0c0b98..196998d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionExpander.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({SecurityTests.class, SmallTests.class})
public class TestExpressionExpander {
- @Test
+ @Test (timeout=60000)
public void testPositiveCases() throws Exception {
ExpressionExpander expander = new ExpressionExpander();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java
index 7c7f54b..a2f3e53 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestExpressionParser.java
@@ -35,7 +35,7 @@ public class TestExpressionParser {
private ExpressionParser parser = new ExpressionParser();
- @Test
+ @Test (timeout=60000)
public void testPositiveCases() throws Exception {
// abc -> (abc)
ExpressionNode node = parser.parse("abc");
@@ -290,7 +290,7 @@ public class TestExpressionParser {
assertEquals("b", ((LeafExpressionNode) nlNodeRight.getChildExps().get(0)).getIdentifier());
}
- @Test
+ @Test (timeout=60000)
public void testNegativeCases() throws Exception {
executeNegativeCase("(");
executeNegativeCase(")");
@@ -309,7 +309,7 @@ public class TestExpressionParser {
executeNegativeCase("! a");
}
- @Test
+ @Test (timeout=60000)
public void testNonAsciiCases() throws Exception {
ExpressionNode node = parser.parse(CellVisibility.quote("\u0027") + "&"
+ CellVisibility.quote("\u002b") + "|" + CellVisibility.quote("\u002d") + "&"
@@ -351,7 +351,7 @@ public class TestExpressionParser {
assertEquals("\u0027", ((LeafExpressionNode) nlNode.getChildExps().get(0)).getIdentifier());
}
- @Test
+ @Test (timeout=60000)
public void testCasesSeperatedByDoubleQuotes() throws Exception {
ExpressionNode node = null;
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
index 2c4955c..fe227af 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
@@ -79,7 +79,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testLabelsTableOpsWithDifferentUsers() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index c087f4e..dd72fc3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -217,7 +217,7 @@ public class TestVisibilityLabelsReplication {
DefaultVisibilityLabelServiceImpl.class, VisibilityLabelService.class);
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityReplication() throws Exception {
Table table = writeData(TABLE_NAME, "(" + SECRET + "&" + PUBLIC + ")" + "|(" + CONFIDENTIAL
+ ")&(" + TOPSECRET + ")", "(" + PRIVATE + "|" + CONFIDENTIAL + ")&(" + PUBLIC + "|"
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index 7e7d8a3..95cf6c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -112,7 +112,7 @@ public class TestVisibilityLabelsWithACL {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void testScanForUserWithFewerLabelAuthsThanLabelsInScanAuthorizations() throws Throwable {
String[] auths = { SECRET };
String user = "user2";
@@ -145,7 +145,7 @@ public class TestVisibilityLabelsWithACL {
NORMAL_USER2.runAs(scanAction);
}
- @Test
+ @Test (timeout=180000)
public void testScanForSuperUserWithFewerLabelAuths() throws Throwable {
String[] auths = { SECRET };
String user = "admin";
@@ -171,7 +171,7 @@ public class TestVisibilityLabelsWithACL {
SUPERUSER.runAs(scanAction);
}
- @Test
+ @Test (timeout=180000)
public void testGetForSuperUserWithFewerLabelAuths() throws Throwable {
String[] auths = { SECRET };
String user = "admin";
@@ -196,7 +196,7 @@ public class TestVisibilityLabelsWithACL {
SUPERUSER.runAs(scanAction);
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsForUserWithNoAuths() throws Throwable {
String user = "admin";
String[] auths = { SECRET };
@@ -227,7 +227,7 @@ public class TestVisibilityLabelsWithACL {
NORMAL_USER2.runAs(getAction);
}
- @Test
+ @Test (timeout=180000)
public void testLabelsTableOpsWithDifferentUsers() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
index 5cc72d2..11fe12d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
@@ -58,7 +58,7 @@ public class TestVisibilityLabelsWithCustomVisLabService extends TestVisibilityL
}
// Extending this test from super as we don't verify predefined labels in ExpAsStringVisibilityLabelServiceImpl
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
// This put with label "SAMPLE_LABEL" should not get failed.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
index 52f86c3..863d016 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
@@ -78,7 +78,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
addLabels();
}
- @Test
+ @Test (timeout=180000)
public void testAddLabels() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
@@ -170,7 +170,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
Assert.assertEquals("The count should be 13", 13, i);
}
- @Test
+ @Test (timeout=180000)
public void testListLabels() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
@@ -200,7 +200,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
SUPERUSER.runAs(action);
}
- @Test
+ @Test (timeout=180000)
public void testListLabelsWithRegEx() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
index b2d0ae5..a58c1d2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -108,7 +108,7 @@ public class TestVisibilityLabelsWithDeletes {
public void tearDown() throws Exception {
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumns() throws Throwable {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -150,7 +150,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteFamily() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -191,7 +191,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteFamilyVersion() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -233,7 +233,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumnExactVersion() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -275,7 +275,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumnsWithMultipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -329,7 +329,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumnsWithMultipleVersionsNoTimestamp()
throws Exception {
setAuths();
@@ -377,7 +377,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void
testVisibilityLabelsWithDeleteColumnsWithNoMatchVisExpWithMultipleVersionsNoTimestamp()
throws Exception {
@@ -430,7 +430,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteFamilyWithMultipleVersionsNoTimestamp()
throws Exception {
setAuths();
@@ -477,7 +477,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteFamilyWithPutsReAppearing() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
@@ -551,7 +551,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumnsWithPutsReAppearing() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
@@ -625,7 +625,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityCombinations() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
@@ -675,7 +675,7 @@ public class TestVisibilityLabelsWithDeletes {
assertEquals(next.length, 0);
}
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityLabelsWithDeleteColumnWithSpecificVersionWithPutsReAppearing()
throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -735,7 +735,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void
testVisibilityLabelsWithDeleteFamilyWithNoMatchingVisExpWithMultipleVersionsNoTimestamp()
throws Exception {
@@ -788,7 +788,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilyAndDeleteColumnsWithAndWithoutVisibilityExp() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -982,7 +982,7 @@ public class TestVisibilityLabelsWithDeletes {
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnWithSpecificTimeStampUsingMultipleVersionsUnMatchingVisExpression()
throws Exception {
setAuths();
@@ -1047,7 +1047,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnWithLatestTimeStampUsingMultipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1196,7 +1196,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnWithLatestTimeStampUsingMultipleVersionsAfterCompaction()
throws Exception {
setAuths();
@@ -1268,7 +1268,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilyLatestTimeStampWithMulipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1322,7 +1322,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnswithMultipleColumnsWithMultipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1385,7 +1385,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnsWithDiffColsAndTags() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
@@ -1432,7 +1432,7 @@ public class TestVisibilityLabelsWithDeletes {
assertEquals(next.length, 1);
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnsWithDiffColsAndTags1() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Admin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
@@ -1479,7 +1479,7 @@ public class TestVisibilityLabelsWithDeletes {
assertEquals(next.length, 1);
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilyWithoutCellVisibilityWithMulipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1522,7 +1522,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilyLatestTimeStampWithMulipleVersionsWithoutCellVisibilityInPuts()
throws Exception {
setAuths();
@@ -1590,7 +1590,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilySpecificTimeStampWithMulipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1650,7 +1650,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanAfterCompaction() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1708,7 +1708,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteFamilySpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -1813,7 +1813,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testMultipleDeleteFamilyVersionWithDiffLabels() throws Exception {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
@@ -2059,7 +2059,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -2161,7 +2161,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice1() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -2274,7 +2274,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice2() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -2393,7 +2393,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnAndDeleteFamilylSpecificTimeStampWithMulipleVersion()
throws Exception {
setAuths();
@@ -2514,7 +2514,7 @@ public class TestVisibilityLabelsWithDeletes {
SUPERUSER.runAs(action);
}
- @Test
+ @Test (timeout=180000)
public void testDiffDeleteTypesForTheSameCellUsingMultipleVersions() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -2617,7 +2617,7 @@ public class TestVisibilityLabelsWithDeletes {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteColumnLatestWithNoCellVisibility() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -2802,7 +2802,7 @@ public class TestVisibilityLabelsWithDeletes {
row2, 0, row2.length));
}
- @Test
+ @Test (timeout=180000)
public void testVisibilityExpressionWithNotEqualORCondition() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
index 371d25a..d47d517 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
@@ -79,7 +79,7 @@ public class TestVisibilityLabelsWithSLGStack {
addLabels();
}
- @Test
+ @Test (timeout=180000)
public void testWithSAGStack() throws Exception {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Table table = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
index 9e122c9..518cc20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
@@ -109,7 +109,7 @@ public class TestVisibilityLablesWithGroups {
});
}
- @Test
+ @Test (timeout=180000)
public void testGroupAuths() throws Exception {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
index 457d2eb..8014455 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
@@ -107,7 +107,7 @@ public class TestVisibilityWithCheckAuths {
SUPERUSER.runAs(action);
}
- @Test
+ @Test (timeout=180000)
public void testVerifyAccessDeniedForInvalidUserAuths() throws Exception {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
@@ -157,7 +157,7 @@ public class TestVisibilityWithCheckAuths {
}
}
- @Test
+ @Test (timeout=180000)
public void testLabelsWithAppend() throws Throwable {
PrivilegedExceptionAction action =
new PrivilegedExceptionAction() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 2774bee..8af2288 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -141,7 +141,7 @@ public class TestExportSnapshot {
* The getBalanceSplits() function sort it by length,
* and assign to each group a file, going back and forth through the groups.
*/
- @Test
+ @Test (timeout=180000)
public void testBalanceSplit() throws Exception {
// Create a list of files
List> files = new ArrayList>();
@@ -189,23 +189,23 @@ public class TestExportSnapshot {
/**
* Verify if exported snapshot and copied files matches the original one.
*/
- @Test
+ @Test (timeout=180000)
public void testExportFileSystemState() throws Exception {
testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
}
- @Test
+ @Test (timeout=180000)
public void testExportFileSystemStateWithSkipTmp() throws Exception {
TEST_UTIL.getConfiguration().setBoolean(ExportSnapshot.CONF_SKIP_TMP, true);
testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles);
}
- @Test
+ @Test (timeout=180000)
public void testEmptyExportFileSystemState() throws Exception {
testExportFileSystemState(tableName, emptySnapshotName, emptySnapshotName, 0);
}
- @Test
+ @Test (timeout=180000)
public void testConsecutiveExports() throws Exception {
Path copyDir = getLocalDestinationDir();
testExportFileSystemState(tableName, snapshotName, snapshotName, tableNumFiles, copyDir, false);
@@ -213,7 +213,7 @@ public class TestExportSnapshot {
removeExportDir(copyDir);
}
- @Test
+ @Test (timeout=180000)
public void testExportWithTargetName() throws Exception {
final byte[] targetName = Bytes.toBytes("testExportWithTargetName");
testExportFileSystemState(tableName, snapshotName, targetName, tableNumFiles);
@@ -223,7 +223,7 @@ public class TestExportSnapshot {
* Mock a snapshot with files in the archive dir,
* two regions, and one reference file.
*/
- @Test
+ @Test (timeout=180000)
public void testSnapshotWithRefsExportFileSystemState() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
@@ -311,7 +311,7 @@ public class TestExportSnapshot {
/**
* Check that ExportSnapshot will return a failure if something fails.
*/
- @Test
+ @Test (timeout=180000)
public void testExportFailure() throws Exception {
assertEquals(1, runExportAndInjectFailures(snapshotName, false));
}
@@ -319,7 +319,7 @@ public class TestExportSnapshot {
/**
* Check that ExportSnapshot will succede if something fails but the retry succede.
*/
- @Test
+ @Test (timeout=180000)
public void testExportRetry() throws Exception {
assertEquals(0, runExportAndInjectFailures(snapshotName, true));
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
index ae1ca13..a685c2d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
@@ -135,12 +135,12 @@ public class TestRestoreFlushSnapshotFromClient {
SnapshotTestingUtils.deleteArchiveDirectory(UTIL);
}
- @Test
+ @Test (timeout=300000)
public void testTakeFlushSnapshot() throws IOException {
// taking happens in setup.
}
- @Test
+ @Test (timeout=300000)
public void testRestoreSnapshot() throws IOException {
SnapshotTestingUtils.verifyRowCount(UTIL, tableName, snapshot1Rows);
@@ -167,7 +167,7 @@ public class TestRestoreFlushSnapshotFromClient {
admin.cloneSnapshot(snapshotName, tableName);
}
- @Test
+ @Test (timeout=300000)
public void testCloneSnapshot() throws IOException, InterruptedException {
TableName clonedTableName = TableName.valueOf("clonedtb-" + System.currentTimeMillis());
testCloneSnapshot(clonedTableName, snapshotName0, snapshot0Rows);
@@ -183,7 +183,7 @@ public class TestRestoreFlushSnapshotFromClient {
UTIL.deleteTable(tableName);
}
- @Test
+ @Test (timeout=300000)
public void testRestoreSnapshotOfCloned() throws IOException, InterruptedException {
TableName clonedTableName = TableName.valueOf("clonedtb-" + System.currentTimeMillis());
admin.cloneSnapshot(snapshotName0, clonedTableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index 7309580..ebfc020 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -75,7 +75,7 @@ public class TestRestoreSnapshotHelper {
fs.delete(TEST_UTIL.getDataTestDir(), true);
}
- @Test
+ @Test (timeout=60000)
public void testRestore() throws IOException {
// Test Rolling-Upgrade like Snapshot.
// half machines writing using v1 and the others using v2 format.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
index f55bb2d..86a0e26 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
@@ -68,7 +68,7 @@ public class TestSnapshotDescriptionUtils {
private static final Log LOG = LogFactory.getLog(TestSnapshotDescriptionUtils.class);
- @Test
+ @Test (timeout=180000)
public void testValidateMissingTableName() {
Configuration conf = new Configuration(false);
try {
@@ -85,7 +85,7 @@ public class TestSnapshotDescriptionUtils {
* 'complete' the snapshot
* @throws Exception on failure
*/
- @Test
+ @Test (timeout=180000)
public void testCompleteSnapshotWithNoSnapshotDirectoryFailure() throws Exception {
Path snapshotDir = new Path(root, HConstants.SNAPSHOT_DIR_NAME);
Path tmpDir = new Path(snapshotDir, ".tmp");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
index 7a47f52..85671e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
@@ -64,7 +64,7 @@ public class TestHTraceHooks {
rcvr = null;
}
- @Test
+ @Test (timeout=180000)
public void testTraceCreateTable() throws Exception {
TraceScope tableCreationSpan = Trace.startSpan("creating table", Sampler.ALWAYS);
Table table;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
index 34c4ec0..14726d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
@@ -80,7 +80,7 @@ public class TestBoundedPriorityBlockingQueue {
public void tearDown() throws Exception {
}
- @Test
+ @Test (timeout=60000)
public void tesAppend() throws Exception {
// Push
for (int i = 1; i <= CAPACITY; ++i) {
@@ -100,7 +100,7 @@ public class TestBoundedPriorityBlockingQueue {
assertEquals(null, queue.poll());
}
- @Test
+ @Test (timeout=60000)
public void tesAppendSamePriority() throws Exception {
// Push
for (int i = 1; i <= CAPACITY; ++i) {
@@ -120,7 +120,7 @@ public class TestBoundedPriorityBlockingQueue {
assertEquals(null, queue.poll());
}
- @Test
+ @Test (timeout=60000)
public void testPrepend() throws Exception {
// Push
for (int i = 1; i <= CAPACITY; ++i) {
@@ -139,7 +139,7 @@ public class TestBoundedPriorityBlockingQueue {
assertEquals(null, queue.poll());
}
- @Test
+ @Test (timeout=60000)
public void testInsert() throws Exception {
// Push
for (int i = 1; i <= CAPACITY; i += 2) {
@@ -162,7 +162,7 @@ public class TestBoundedPriorityBlockingQueue {
assertEquals(null, queue.poll());
}
- @Test
+ @Test (timeout=60000)
public void testFifoSamePriority() throws Exception {
assertTrue(CAPACITY >= 6);
for (int i = 0; i < 6; ++i) {
@@ -183,7 +183,7 @@ public class TestBoundedPriorityBlockingQueue {
assertEquals(null, queue.poll());
}
- @Test
+ @Test (timeout=60000)
public void testPoll() {
assertNull(queue.poll());
PriorityQueue testList = new PriorityQueue(CAPACITY, new TestObjectComparator());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
index 8a48d32..8484214 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
@@ -94,7 +94,7 @@ public class TestByteBufferUtils {
assertEquals(Long.MAX_VALUE, a.last().longValue());
}
- @Test
+ @Test (timeout=60000)
public void testReadWriteVLong() {
for (long l : testNumbers) {
ByteBuffer b = ByteBuffer.allocate(MAX_VLONG_LENGTH);
@@ -104,7 +104,7 @@ public class TestByteBufferUtils {
}
}
- @Test
+ @Test (timeout=60000)
public void testConsistencyWithHadoopVLong() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
@@ -123,7 +123,7 @@ public class TestByteBufferUtils {
/**
* Test copying to stream from buffer.
*/
- @Test
+ @Test (timeout=60000)
public void testMoveBufferToStream() {
final int arrayOffset = 7;
final int initialPosition = 10;
@@ -151,7 +151,7 @@ public class TestByteBufferUtils {
* Test copying to stream from buffer with offset.
* @throws IOException On test failure.
*/
- @Test
+ @Test (timeout=60000)
public void testCopyToStreamWithOffset() throws IOException {
ByteBuffer buffer = ByteBuffer.wrap(array);
@@ -171,7 +171,7 @@ public class TestByteBufferUtils {
* Test copying data from stream.
* @throws IOException On test failure.
*/
- @Test
+ @Test (timeout=60000)
public void testCopyFromStream() throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(array.length);
ByteArrayInputStream bis = new ByteArrayInputStream(array);
@@ -188,7 +188,7 @@ public class TestByteBufferUtils {
/**
* Test copying from buffer.
*/
- @Test
+ @Test (timeout=60000)
public void testCopyFromBuffer() {
ByteBuffer srcBuffer = ByteBuffer.allocate(array.length);
ByteBuffer dstBuffer = ByteBuffer.allocate(array.length);
@@ -206,7 +206,7 @@ public class TestByteBufferUtils {
* Test 7-bit encoding of integers.
* @throws IOException On test failure.
*/
- @Test
+ @Test (timeout=60000)
public void testCompressedInt() throws IOException {
testCompressedInt(0);
testCompressedInt(Integer.MAX_VALUE);
@@ -224,7 +224,7 @@ public class TestByteBufferUtils {
/**
* Test how much bytes we need to store integer.
*/
- @Test
+ @Test (timeout=60000)
public void testIntFitsIn() {
assertEquals(1, ByteBufferUtils.intFitsIn(0));
assertEquals(1, ByteBufferUtils.intFitsIn(1));
@@ -238,7 +238,7 @@ public class TestByteBufferUtils {
/**
* Test how much bytes we need to store long.
*/
- @Test
+ @Test (timeout=60000)
public void testLongFitsIn() {
assertEquals(1, ByteBufferUtils.longFitsIn(0));
assertEquals(1, ByteBufferUtils.longFitsIn(1));
@@ -252,7 +252,7 @@ public class TestByteBufferUtils {
/**
* Test if we are comparing equal bytes.
*/
- @Test
+ @Test (timeout=60000)
public void testArePartEqual() {
byte[] array = new byte[] { 1, 2, 3, 4, 5, 1, 2, 3, 4 };
ByteBuffer buffer = ByteBuffer.wrap(array);
@@ -266,7 +266,7 @@ public class TestByteBufferUtils {
/**
* Test serializing int to bytes
*/
- @Test
+ @Test (timeout=60000)
public void testPutInt() {
testPutInt(0);
testPutInt(Integer.MAX_VALUE);
@@ -312,7 +312,7 @@ public class TestByteBufferUtils {
}
}
- @Test
+ @Test (timeout=60000)
public void testToBytes(){
ByteBuffer buffer = ByteBuffer.allocate(5);
buffer.put(new byte[]{0,1,2,3,4});
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
index c5bd284..40b69aa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
@@ -43,7 +43,7 @@ import static org.junit.Assert.*;
public class TestCompressionTest {
static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
- @Test
+ @Test (timeout=60000)
public void testExceptionCaching() {
// This test will fail if you run the tests with LZO compression available.
try {
@@ -66,7 +66,7 @@ public class TestCompressionTest {
assertFalse(CompressionTest.testCompression("LZO"));
}
- @Test
+ @Test (timeout=60000)
public void testTestCompression() {
assertTrue(CompressionTest.testCompression("NONE"));
assertTrue(CompressionTest.testCompression("GZ"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
index 7600388..3b67405 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
@@ -103,7 +103,7 @@ public class TestCoprocessorScanPolicy {
.setBoolean(StoreScanner.STORESCANNER_PARALLEL_SEEK_ENABLE, parallelSeekEnable);
}
- @Test
+ @Test (timeout=180000)
public void testBaseCases() throws Exception {
TableName tableName =
TableName.valueOf("baseCases");
@@ -153,7 +153,7 @@ public class TestCoprocessorScanPolicy {
t.close();
}
- @Test
+ @Test (timeout=180000)
public void testTTL() throws Exception {
TableName tableName =
TableName.valueOf("testTTL");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java
index 3cb1f18..8b440ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java
@@ -33,7 +33,7 @@ import static junit.framework.Assert.fail;
@Category({MiscTests.class, MediumTests.class})
public class TestDefaultEnvironmentEdge {
- @Test
+ @Test (timeout=180000)
public void testGetCurrentTimeUsesSystemClock() {
DefaultEnvironmentEdge edge = new DefaultEnvironmentEdge();
long systemTime = System.currentTimeMillis();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
index 5d2f04f..9df3072 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
@@ -39,7 +39,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestEncryptionTest {
- @Test
+ @Test (timeout=60000)
public void testTestKeyProvider() {
Configuration conf = HBaseConfiguration.create();
try {
@@ -55,7 +55,7 @@ public class TestEncryptionTest {
} catch (Exception e) { }
}
- @Test
+ @Test (timeout=60000)
public void testTestCipherProvider() {
Configuration conf = HBaseConfiguration.create();
try {
@@ -71,7 +71,7 @@ public class TestEncryptionTest {
} catch (Exception e) { }
}
- @Test
+ @Test (timeout=60000)
public void testTestCipher() {
Configuration conf = HBaseConfiguration.create();
conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
index ea19ea7..413fec0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
@@ -100,7 +100,7 @@ public class TestFSHDFSUtils {
Mockito.verify(dfs, Mockito.times(1)).isFileClosed(FILE);
}
- @Test
+ @Test (timeout=180000)
public void testIsSameHdfs() throws IOException {
try {
Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index c09982e..1b3e0d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -72,7 +72,7 @@ public class TestFSTableDescriptors {
FSTableDescriptors.getTableInfoSequenceId(p);
}
- @Test
+ @Test (timeout=180000)
public void testCreateAndUpdate() throws IOException {
Path testdir = UTIL.getDataTestDir("testCreateAndUpdate");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testCreate"));
@@ -93,7 +93,7 @@ public class TestFSTableDescriptors {
assertTrue(statuses.length == 0);
}
- @Test
+ @Test (timeout=180000)
public void testSequenceIdAdvancesOnTableInfo() throws IOException {
Path testdir = UTIL.getDataTestDir("testSequenceidAdvancesOnTableInfo");
HTableDescriptor htd = new HTableDescriptor(
@@ -123,7 +123,7 @@ public class TestFSTableDescriptors {
assertEquals(descriptor, td);
}
- @Test
+ @Test (timeout=180000)
public void testFormatTableInfoSequenceId() {
Path p0 = assertWriteAndReadSequenceId(0);
// Assert p0 has format we expect.
@@ -155,7 +155,7 @@ public class TestFSTableDescriptors {
return p;
}
- @Test
+ @Test (timeout=180000)
public void testRemoves() throws IOException {
final String name = "testRemoves";
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -252,7 +252,7 @@ public class TestFSTableDescriptors {
htds.cachehits >= (count * 2));
}
- @Test
+ @Test (timeout=180000)
public void testHTableDescriptorsNoCache()
throws IOException, InterruptedException {
final String name = "testHTableDescriptorsNoCache";
@@ -286,7 +286,7 @@ public class TestFSTableDescriptors {
assertEquals("expected=0, actual=" + htds.cachehits, 0, htds.cachehits);
}
- @Test
+ @Test (timeout=180000)
public void testGetAll()
throws IOException, InterruptedException {
final String name = "testGetAll";
@@ -310,7 +310,7 @@ public class TestFSTableDescriptors {
}
- @Test
+ @Test (timeout=180000)
public void testCacheConsistency()
throws IOException, InterruptedException {
final String name = "testCacheConsistency";
@@ -351,7 +351,7 @@ public class TestFSTableDescriptors {
}
}
- @Test
+ @Test (timeout=180000)
public void testNoSuchTable() throws IOException {
final String name = "testNoSuchTable";
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -362,7 +362,7 @@ public class TestFSTableDescriptors {
htds.get(TableName.valueOf("NoSuchTable")));
}
- @Test
+ @Test (timeout=180000)
public void testUpdates() throws IOException {
final String name = "testUpdates";
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
@@ -375,7 +375,7 @@ public class TestFSTableDescriptors {
htds.add(htd);
}
- @Test
+ @Test (timeout=180000)
public void testTableInfoFileStatusComparator() {
FileStatus bare =
new FileStatus(0, false, 0, 0, -1,
@@ -401,7 +401,7 @@ public class TestFSTableDescriptors {
}
}
- @Test
+ @Test (timeout=180000)
public void testReadingInvalidDirectoryFromFS() throws IOException {
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
try {
@@ -415,7 +415,7 @@ public class TestFSTableDescriptors {
}
}
- @Test
+ @Test (timeout=180000)
public void testCreateTableDescriptorUpdatesIfExistsAlready() throws IOException {
Path testdir = UTIL.getDataTestDir("testCreateTableDescriptorUpdatesIfThereExistsAlready");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
index c8b2285..dd5ad11 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
@@ -61,7 +61,7 @@ public class TestFSUtils {
* Test path compare and prefix checking.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testMatchingTail() throws IOException {
HBaseTestingUtility htu = new HBaseTestingUtility();
final FileSystem fs = htu.getTestFileSystem();
@@ -84,7 +84,7 @@ public class TestFSUtils {
assertFalse(FSUtils.isMatchingTail(new Path("x"), fullPath));
}
- @Test
+ @Test (timeout=180000)
public void testVersion() throws DeserializationException, IOException {
HBaseTestingUtility htu = new HBaseTestingUtility();
final FileSystem fs = htu.getTestFileSystem();
@@ -235,7 +235,7 @@ public class TestFSUtils {
}
}
- @Test
+ @Test (timeout=180000)
public void testPermMask() throws Exception {
Configuration conf = HBaseConfiguration.create();
@@ -276,7 +276,7 @@ public class TestFSUtils {
}
}
- @Test
+ @Test (timeout=180000)
public void testDeleteAndExists() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
Configuration conf = htu.getConfiguration();
@@ -306,7 +306,7 @@ public class TestFSUtils {
}
}
- @Test
+ @Test (timeout=180000)
public void testRenameAndSetModifyTime() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
Configuration conf = htu.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
index d1516ca..9f51c8f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
@@ -84,7 +84,7 @@ public class TestFSVisitor {
fs.delete(rootDir, true);
}
- @Test
+ @Test (timeout=180000)
public void testVisitStoreFiles() throws IOException {
final Set regions = new HashSet();
final Set families = new HashSet();
@@ -102,7 +102,7 @@ public class TestFSVisitor {
assertEquals(tableHFiles, hfiles);
}
- @Test
+ @Test (timeout=180000)
public void testVisitRecoveredEdits() throws IOException {
final Set regions = new HashSet();
final Set edits = new HashSet();
@@ -117,7 +117,7 @@ public class TestFSVisitor {
assertEquals(recoveredEdits, edits);
}
- @Test
+ @Test (timeout=180000)
public void testVisitLogFiles() throws IOException {
final Set servers = new HashSet();
final Set logs = new HashSet();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java
index acd62b1..5dfb12f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java
@@ -51,7 +51,7 @@ public class TestHBaseFsckComparator {
time));
}
- @Test
+ @Test (timeout=60000)
public void testEquals() {
HbckInfo hi1 = genHbckInfo(table, keyA, keyB, 0);
HbckInfo hi2 = genHbckInfo(table, keyA, keyB, 0);
@@ -59,7 +59,7 @@ public class TestHBaseFsckComparator {
assertEquals(0, HBaseFsck.cmp.compare(hi2, hi1));
}
- @Test
+ @Test (timeout=60000)
public void testEqualsInstance() {
HbckInfo hi1 = genHbckInfo(table, keyA, keyB, 0);
HbckInfo hi2 = hi1;
@@ -67,7 +67,7 @@ public class TestHBaseFsckComparator {
assertEquals(0, HBaseFsck.cmp.compare(hi2, hi1));
}
- @Test
+ @Test (timeout=60000)
public void testDiffTable() {
HbckInfo hi1 = genHbckInfo(table, keyA, keyC, 0);
HbckInfo hi2 = genHbckInfo(table2, keyA, keyC, 0);
@@ -75,7 +75,7 @@ public class TestHBaseFsckComparator {
assertTrue(HBaseFsck.cmp.compare(hi2, hi1) > 0);
}
- @Test
+ @Test (timeout=60000)
public void testDiffStartKey() {
HbckInfo hi1 = genHbckInfo(table, keyStart, keyC, 0);
HbckInfo hi2 = genHbckInfo(table, keyA, keyC, 0);
@@ -83,7 +83,7 @@ public class TestHBaseFsckComparator {
assertTrue(HBaseFsck.cmp.compare(hi2, hi1) > 0);
}
- @Test
+ @Test (timeout=60000)
public void testDiffEndKey() {
HbckInfo hi1 = genHbckInfo(table, keyA, keyB, 0);
HbckInfo hi2 = genHbckInfo(table, keyA, keyC, 0);
@@ -91,7 +91,7 @@ public class TestHBaseFsckComparator {
assertTrue(HBaseFsck.cmp.compare(hi2, hi1) > 0);
}
- @Test
+ @Test (timeout=60000)
public void testAbsEndKey() {
HbckInfo hi1 = genHbckInfo(table, keyA, keyC, 0);
HbckInfo hi2 = genHbckInfo(table, keyA, keyEnd, 0);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index 7c289a1..09488db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -102,7 +102,7 @@ public class TestHBaseFsckEncryption {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=300000)
public void testFsckWithEncryption() throws Exception {
// Populate the table with some data
Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
index ab14c41..05a220d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
@@ -36,27 +36,27 @@ import java.io.IOException;
@Category({MiscTests.class, SmallTests.class})
public class TestHFileArchiveUtil {
private Path rootDir = new Path("./");
- @Test
+ @Test (timeout=60000)
public void testGetTableArchivePath() {
assertNotNull(HFileArchiveUtil.getTableArchivePath(rootDir,
TableName.valueOf("table")));
}
- @Test
+ @Test (timeout=60000)
public void testGetArchivePath() throws Exception {
Configuration conf = new Configuration();
FSUtils.setRootDir(conf, new Path("root"));
assertNotNull(HFileArchiveUtil.getArchivePath(conf));
}
- @Test
+ @Test (timeout=60000)
public void testRegionArchiveDir() {
Path regionDir = new Path("region");
assertNotNull(HFileArchiveUtil.getRegionArchiveDir(rootDir,
TableName.valueOf("table"), regionDir));
}
- @Test
+ @Test (timeout=60000)
public void testGetStoreArchivePath() throws IOException {
byte[] family = Bytes.toBytes("Family");
Path tabledir = FSUtils.getTableDir(rootDir,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
index fbfbb47..1bf76e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
@@ -91,7 +91,7 @@ public class TestIdLock {
}
- @Test
+ @Test (timeout=180000)
public void testMultipleClients() throws Exception {
ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS);
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
index 4650ced..fcd57d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestIncrementingEnvironmentEdge {
- @Test
+ @Test (timeout=60000)
public void testGetCurrentTimeUsesSystemClock() {
IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1);
assertEquals(1, edge.currentTime());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index b04e5de..eea30dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -259,7 +259,7 @@ public class TestMergeTool extends HBaseTestCase {
* Test merge tool.
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testMergeTool() throws Exception {
// First verify we can read the rows from the source regions and that they
// contain the right data.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
index 513d538..b6fd81c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestProcessBasedCluster.java
@@ -86,7 +86,7 @@ public class TestProcessBasedCluster {
}
}
- @Test
+ @Test (timeout=180000)
public void testHomePath() {
File pom = new File(HBaseHomePath.getHomePath(), "pom.xml");
assertTrue(pom.getPath() + " does not exist", pom.exists());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
index 8b74112..77ae590 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
@@ -49,7 +49,7 @@ public class TestRegionSizeCalculator {
private Configuration configuration = new Configuration();
private final long megabyte = 1024L * 1024L;
- @Test
+ @Test (timeout=60000)
public void testSimpleTestCase() throws Exception {
RegionLocator regionLocator = mockRegionLocator("region1", "region2", "region3");
@@ -81,7 +81,7 @@ public class TestRegionSizeCalculator {
* When size of region in megabytes is larger than largest possible integer there could be
* error caused by lost of precision.
* */
- @Test
+ @Test (timeout=60000)
public void testLargeRegion() throws Exception {
RegionLocator regionLocator = mockRegionLocator("largeRegion");
@@ -98,7 +98,7 @@ public class TestRegionSizeCalculator {
}
/** When calculator is disabled, it should return 0 for each request.*/
- @Test
+ @Test (timeout=60000)
public void testDisabled() throws Exception {
String regionName = "cz.goout:/index.html";
RegionLocator table = mockRegionLocator(regionName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
index c35491d..a07da59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
@@ -121,7 +121,7 @@ public class TestRegionSplitCalculator {
return s;
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculator() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
@@ -140,7 +140,7 @@ public class TestRegionSplitCalculator {
+ "D:\t\n");
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorNoEdge() {
RegionSplitCalculator sc = new RegionSplitCalculator(
cmp);
@@ -152,7 +152,7 @@ public class TestRegionSplitCalculator {
assertEquals("", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorSingleEdge() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
RegionSplitCalculator sc = new RegionSplitCalculator(
@@ -166,7 +166,7 @@ public class TestRegionSplitCalculator {
assertEquals("A:\t[A, B]\t\n" + "B:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorDegenerateEdge() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("A"));
RegionSplitCalculator sc = new RegionSplitCalculator(
@@ -180,7 +180,7 @@ public class TestRegionSplitCalculator {
assertEquals("A:\t[A, A]\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorCoverSplit() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
@@ -199,7 +199,7 @@ public class TestRegionSplitCalculator {
+ "C:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorOverEndpoint() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
@@ -218,7 +218,7 @@ public class TestRegionSplitCalculator {
+ "C:\t[B, D]\t\n" + "D:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorHoles() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
@@ -237,7 +237,7 @@ public class TestRegionSplitCalculator {
+ "E:\t[E, F]\t\n" + "F:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorOverreach() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("D"));
@@ -254,7 +254,7 @@ public class TestRegionSplitCalculator {
+ "C:\t[B, D]\t\n" + "D:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorFloor() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
@@ -270,7 +270,7 @@ public class TestRegionSplitCalculator {
assertEquals("A:\t[A, B]\t[A, C]\t\n" + "B:\t[A, C]\t\n" + "C:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorCeil() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
@@ -286,7 +286,7 @@ public class TestRegionSplitCalculator {
assertEquals("A:\t[A, C]\t\n" + "B:\t[A, C]\t[B, C]\t\n" + "C:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorEq() {
SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
@@ -304,7 +304,7 @@ public class TestRegionSplitCalculator {
assertEquals("A:\t[A, C]\t[A, C]\t\n" + "C:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testSplitCalculatorBackwards() {
SimpleRange a = new SimpleRange(Bytes.toBytes("C"), Bytes.toBytes("A"));
RegionSplitCalculator sc = new RegionSplitCalculator(
@@ -318,7 +318,7 @@ public class TestRegionSplitCalculator {
assertEquals("", res);
}
- @Test
+ @Test (timeout=60000)
public void testComplex() {
RegionSplitCalculator sc = new RegionSplitCalculator(
cmp);
@@ -342,7 +342,7 @@ public class TestRegionSplitCalculator {
+ "H:\t[H, I]\t\n" + "I:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testBeginEndMarker() {
RegionSplitCalculator sc = new RegionSplitCalculator(
cmp);
@@ -358,7 +358,7 @@ public class TestRegionSplitCalculator {
+ "null:\t\n", res);
}
- @Test
+ @Test (timeout=60000)
public void testBigRanges() {
SimpleRange ai = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("I"));
SimpleRange ae = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("E"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
index 63154a8..fedd760 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
@@ -71,7 +71,7 @@ public class TestRegionSplitter {
/**
* Test creating a pre-split table using the HexStringSplit algorithm.
*/
- @Test
+ @Test (timeout=180000)
public void testCreatePresplitTableHex() throws Exception {
final List expectedBounds = new ArrayList();
expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
@@ -101,7 +101,7 @@ public class TestRegionSplitter {
/**
* Test creating a pre-split table using the UniformSplit algorithm.
*/
- @Test
+ @Test (timeout=180000)
public void testCreatePresplitTableUniform() throws Exception {
List expectedBounds = new ArrayList();
expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
@@ -131,7 +131,7 @@ public class TestRegionSplitter {
* Unit tests for the HexStringSplit algorithm. Makes sure it divides up the
* space of keys in the way that we expect.
*/
- @Test
+ @Test (timeout=180000)
public void unitTestHexStringSplit() {
HexStringSplit splitter = new HexStringSplit();
// Check splitting while starting from scratch
@@ -169,7 +169,7 @@ public class TestRegionSplitter {
* Unit tests for the UniformSplit algorithm. Makes sure it divides up the space of
* keys in the way that we expect.
*/
- @Test
+ @Test (timeout=180000)
public void unitTestUniformSplit() {
UniformSplit splitter = new UniformSplit();
@@ -213,7 +213,7 @@ public class TestRegionSplitter {
assertArrayEquals(splitPoint, new byte[] {'a', 'a', 'a', (byte)0x80 });
}
- @Test
+ @Test (timeout=180000)
public void testUserInput() {
SplitAlgorithm algo = new HexStringSplit();
assertFalse(splitFailsPrecondition(algo)); // default settings are fine
@@ -287,7 +287,7 @@ public class TestRegionSplitter {
verifyBounds(expectedBounds, tableName);
}
- @Test
+ @Test (timeout=180000)
public void noopRollingSplit() throws Exception {
final List expectedBounds = new ArrayList();
expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
index 839d1cc..ee96807 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestSortedCopyOnWriteSet {
- @Test
+ @Test (timeout=60000)
public void testSorting() throws Exception {
SortedCopyOnWriteSet set = new SortedCopyOnWriteSet();
set.add("c");
@@ -50,7 +50,7 @@ public class TestSortedCopyOnWriteSet {
assertArrayEquals(expected, stored);
}
- @Test
+ @Test (timeout=60000)
public void testIteratorIsolation() throws Exception {
SortedCopyOnWriteSet set = new SortedCopyOnWriteSet(
Lists.newArrayList("a", "b", "c", "d", "e"));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
index f585f47..1a6722f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
@@ -88,14 +88,14 @@ public class TestTableName extends TestWatcher {
}
}
- @Test
+ @Test (timeout=180000)
public void testLegalHTableNames() {
for (String tn : legalTableNames) {
TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
}
}
- @Test
+ @Test (timeout=180000)
public void testIllegalHTableNames() {
for (String tn : illegalTableNames) {
try {
@@ -158,7 +158,7 @@ public class TestTableName extends TestWatcher {
new Names("n2", "table2")
};
- @Test
+ @Test (timeout=180000)
public void testValueOf() {
Map inCache = new HashMap<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingProvider.java
index 1c7813b..bc14221 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingProvider.java
@@ -99,7 +99,7 @@ public class TestBoundedRegionGroupingProvider {
/**
* Write to a log file with three concurrent threads and verifying all data is written.
*/
- @Test
+ @Test (timeout=300000)
public void testConcurrentWrites() throws Exception {
// Run the WPE tool with three threads writing 3000 edits each concurrently.
// When done, verify that all edits were written.
@@ -111,7 +111,7 @@ public class TestBoundedRegionGroupingProvider {
/**
* Make sure we can successfully run with more regions then our bound.
*/
- @Test
+ @Test (timeout=300000)
public void testMoreRegionsThanBound() throws Exception {
final String parallelism = Integer.toString(DEFAULT_NUM_REGION_GROUPS * 2);
int errCode = WALPerformanceEvaluation.innerMain(new Configuration(conf),
@@ -120,7 +120,7 @@ public class TestBoundedRegionGroupingProvider {
assertEquals(0, errCode);
}
- @Test
+ @Test (timeout=300000)
public void testBoundsGreaterThanDefault() throws Exception {
final int temp = conf.getInt(NUM_REGION_GROUPS, DEFAULT_NUM_REGION_GROUPS);
try {
@@ -135,7 +135,7 @@ public class TestBoundedRegionGroupingProvider {
}
}
- @Test
+ @Test (timeout=300000)
public void testMoreRegionsThanBoundWithBoundsGreaterThanDefault() throws Exception {
final int temp = conf.getInt(NUM_REGION_GROUPS, DEFAULT_NUM_REGION_GROUPS);
try {
@@ -153,7 +153,7 @@ public class TestBoundedRegionGroupingProvider {
/**
* Ensure that we can use Set.add to deduplicate WALs
*/
- @Test
+ @Test (timeout=300000)
public void setMembershipDedups() throws IOException {
final int temp = conf.getInt(NUM_REGION_GROUPS, DEFAULT_NUM_REGION_GROUPS);
WALFactory wals = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java
index df8ceaf..f5c5e32 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDefaultWALProvider.java
@@ -115,7 +115,7 @@ public class TestDefaultWALProvider {
return "TestDefaultWALProvider";
}
- @Test
+ @Test (timeout=180000)
public void testGetServerNameFromWALDirectoryName() throws IOException {
ServerName sn = ServerName.valueOf("hn", 450, 1398);
String hl = FSUtils.getRootDir(conf) + "/" +
@@ -179,7 +179,7 @@ public class TestDefaultWALProvider {
private static final byte[] UNSPECIFIED_REGION = new byte[]{};
- @Test
+ @Test (timeout=180000)
public void testLogCleaning() throws Exception {
LOG.info("testLogCleaning");
final HTableDescriptor htd =
@@ -254,7 +254,7 @@ public class TestDefaultWALProvider {
*
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testWALArchiving() throws IOException {
LOG.debug("testWALArchiving");
HTableDescriptor table1 =
@@ -329,7 +329,7 @@ public class TestDefaultWALProvider {
* Write to a log file with three concurrent threads and verifying all data is written.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testConcurrentWrites() throws Exception {
// Run the WPE tool with three threads writing 3000 edits each concurrently.
// When done, verify that all edits were written.
@@ -342,7 +342,7 @@ public class TestDefaultWALProvider {
/**
* Ensure that we can use Set.add to deduplicate WALs
*/
- @Test
+ @Test (timeout=180000)
public void setMembershipDedups() throws IOException {
final Configuration localConf = new Configuration(conf);
localConf.set(WALFactory.WAL_PROVIDER, DefaultWALProvider.class.getName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index 6f05839..65e91bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
@@ -78,7 +78,7 @@ public class TestSecureWAL {
FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
}
- @Test
+ @Test (timeout=180000)
public void testSecureWAL() throws Exception {
TableName tableName = TableName.valueOf("TestSecureWAL");
HTableDescriptor htd = new HTableDescriptor(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index bbe4018..4ccaa46 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -152,7 +152,7 @@ public class TestWALFactory {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @Test (timeout=180000)
public void canCloseSingleton() throws IOException {
WALFactory.getInstance(conf).close();
}
@@ -162,7 +162,7 @@ public class TestWALFactory {
* would fail.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testSplit() throws IOException {
final TableName tableName = TableName.valueOf(currentTest.getMethodName());
final byte [] rowName = tableName.getName();
@@ -211,7 +211,7 @@ public class TestWALFactory {
* Test new HDFS-265 sync.
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void Broken_testSync() throws Exception {
TableName tableName = TableName.valueOf(currentTest.getMethodName());
// First verify that using streams all works.
@@ -477,7 +477,7 @@ public class TestWALFactory {
* Tests that we can write out an edit, close, and then read it back in again.
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testEditAdd() throws IOException {
final int COL_COUNT = 10;
final HTableDescriptor htd =
@@ -535,7 +535,7 @@ public class TestWALFactory {
/**
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testAppend() throws IOException {
final int COL_COUNT = 10;
final HTableDescriptor htd =
@@ -590,7 +590,7 @@ public class TestWALFactory {
* Test that we can visit entries before they are appended
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testVisitors() throws Exception {
final int COL_COUNT = 10;
final TableName tableName =
@@ -630,7 +630,7 @@ public class TestWALFactory {
/**
* A loaded WAL coprocessor won't break existing WAL test cases.
*/
- @Test
+ @Test (timeout=180000)
public void testWALCoprocessorLoaded() throws Exception {
// test to see whether the coprocessor is loaded or not.
WALCoprocessorHost host = wals.getWAL(UNSPECIFIED_REGION).getCoprocessorHost();
@@ -641,7 +641,7 @@ public class TestWALFactory {
/**
* @throws IOException
*/
- @Test
+ @Test (timeout=180000)
public void testReadLegacyLog() throws IOException {
final int columnCount = 5;
final int recordCount = 5;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
index 66868a1..8d26a5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
@@ -103,7 +103,7 @@ public class TestWALFiltering {
TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
}
- @Test
+ @Test (timeout=180000)
public void testFlushedSequenceIdsSentToHMaster()
throws IOException, InterruptedException, ServiceException {
SortedMap allFlushedSequenceIds =
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java
index 0c03019..c5b6f72 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALMethods.java
@@ -107,7 +107,7 @@ public class TestWALMethods {
fdos.close();
}
- @Test
+ @Test (timeout=60000)
public void testRegionEntryBuffer() throws Exception {
WALSplitter.RegionEntryBuffer reb = new WALSplitter.RegionEntryBuffer(
TEST_TABLE, TEST_REGION);
@@ -117,7 +117,7 @@ public class TestWALMethods {
assertTrue(reb.heapSize() > 0);
}
- @Test
+ @Test (timeout=60000)
public void testEntrySink() throws Exception {
Configuration conf = new Configuration();
RecoveryMode mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java
index e83ac74..454ee7d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java
@@ -68,7 +68,7 @@ public class TestRecoverableZooKeeper {
TEST_UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testSetDataVersionMismatchInLoop() throws Exception {
String znode = "/hbase/splitWAL/9af7cfc9b15910a0b3d714bf40a3248f";
Configuration conf = TEST_UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
index eae7c2a..ab38668 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
@@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category;
@Category({MiscTests.class, SmallTests.class})
public class TestZKConfig {
- @Test
+ @Test (timeout=60000)
public void testZKConfigLoading() throws Exception {
// Test depends on test resource 'zoo.cfg' at src/test/resources/zoo.cfg
Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
index c830b04..585771b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
@@ -143,7 +143,7 @@ public class TestZKLeaderManager {
TEST_UTIL.shutdownMiniZKCluster();
}
- @Test
+ @Test (timeout=180000)
public void testLeaderSelection() throws Exception {
MockLeader currentLeader = getCurrentLeader();
// one leader should have been found
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
index 93a6291..0c4a1ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
@@ -249,7 +249,7 @@ public class TestZooKeeperACL {
* Finally, we check the ACLs of a node outside of the /hbase hierarchy and
* verify that its ACL is simply 'hbase:Perms.ALL'.
*/
- @Test
+ @Test (timeout=180000)
public void testOutsideHBaseNodeACL() throws Exception {
if (!secureZKAvailable) {
return;
@@ -267,7 +267,7 @@ public class TestZooKeeperACL {
/**
* Check if ZooKeeper JaasConfiguration is valid.
*/
- @Test
+ @Test (timeout=180000)
public void testIsZooKeeperSecure() throws Exception {
boolean testJaasConfig = ZKUtil.isSecureZooKeeper(new Configuration(TEST_UTIL.getConfiguration()));
assertEquals(testJaasConfig, secureZKAvailable);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServer.java
index 1928b18..9e25ee1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServer.java
@@ -65,7 +65,7 @@ public class TestZooKeeperMainServer {
* We need delete of a znode to work at least.
* @throws Exception
*/
- @Test
+ @Test (timeout=60000)
public void testCommandLineWorks() throws Exception {
System.setSecurityManager(new NoExitSecurityManager());
HBaseTestingUtility htu = new HBaseTestingUtility();
@@ -92,7 +92,7 @@ public class TestZooKeeperMainServer {
}
}
- @Test
+ @Test (timeout=60000)
public void testHostPortParse() {
ZooKeeperMainServer parser = new ZooKeeperMainServer();
Configuration c = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java
index 010c1c9..7b55c97 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java
@@ -92,7 +92,7 @@ public class TestZooKeeperNodeTracker {
// If it wasn't interruptible, we'd never get to here.
}
- @Test
+ @Test (timeout=180000)
public void testNodeTracker() throws Exception {
Abortable abortable = new StubAbortable();
ZooKeeperWatcher zk = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
@@ -315,7 +315,7 @@ public class TestZooKeeperNodeTracker {
public void process(WatchedEvent event) {}
}
- @Test
+ @Test (timeout=180000)
public void testCleanZNode() throws Exception {
ZooKeeperWatcher zkw = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
"testNodeTracker", new TestZooKeeperNodeTracker.StubAbortable());
diff --git a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
index da2b709..9680a18 100644
--- a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
+++ b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
@@ -28,7 +28,7 @@ import org.junit.experimental.categories.Category;
@Category({ ClientTests.class, LargeTests.class })
public class TestReplicationShell extends AbstractTestShell {
- @Test
+ @Test (timeout=300000)
public void testRunShellTests() throws IOException {
System.setProperty("shell.test.include", "replication_admin_test.rb");
// Start all ruby tests
diff --git a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestShell.java b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestShell.java
index 976ba45..0618ead 100644
--- a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestShell.java
+++ b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestShell.java
@@ -29,7 +29,7 @@ import org.junit.experimental.categories.Category;
@Category({ ClientTests.class, LargeTests.class })
public class TestShell extends AbstractTestShell {
- @Test
+ @Test (timeout=300000)
public void testRunShellTests() throws IOException {
System.setProperty("shell.test.exclude", "replication_admin_test.rb");
// Start all ruby tests
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
index d5a020e..330d569 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
@@ -114,7 +114,7 @@ public class TestThriftServer {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=300000)
public void testAll() throws Exception {
// Run all tests
doTestTableCreateDrop();
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
index 101a7cf..bcd9058 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestHTablePool.java
@@ -59,7 +59,7 @@ public class TestHTablePool {
protected abstract PoolType getPoolType();
- @Test
+ @Test (timeout=180000)
public void testTableWithStringName() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
@@ -79,7 +79,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) sameTable).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testTableWithByteArrayName() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
@@ -98,7 +98,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) sameTable).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testTablesWithDifferentNames() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
@@ -128,7 +128,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) table2).getWrappedTable(),
((HTablePool.PooledHTable) sameTable2).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testProxyImplementationReturned() {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE);
@@ -141,7 +141,7 @@ public class TestHTablePool {
Assert.assertTrue(table instanceof HTablePool.PooledHTable);
}
- @Test
+ @Test (timeout=180000)
public void testDeprecatedUsagePattern() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE);
@@ -163,7 +163,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) sameTable).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testReturnDifferentTable() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE);
@@ -187,7 +187,7 @@ public class TestHTablePool {
}
}
- @Test
+ @Test (timeout=180000)
public void testHTablePoolCloseTwice() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
Integer.MAX_VALUE, getPoolType());
@@ -219,7 +219,7 @@ public class TestHTablePool {
return PoolType.Reusable;
}
- @Test
+ @Test (timeout=180000)
public void testTableWithMaxSize() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
getPoolType());
@@ -250,7 +250,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) sameTable3).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testCloseTablePool() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
getPoolType());
@@ -294,7 +294,7 @@ public class TestHTablePool {
return PoolType.ThreadLocal;
}
- @Test
+ @Test (timeout=180000)
public void testTableWithMaxSize() throws Exception {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 2,
getPoolType());
@@ -326,7 +326,7 @@ public class TestHTablePool {
((HTablePool.PooledHTable) sameTable3).getWrappedTable());
}
- @Test
+ @Test (timeout=180000)
public void testCloseTablePool() throws IOException {
HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(), 4,
getPoolType());
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
index f4df271..8b191b9 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
@@ -163,7 +163,7 @@ public class TestThriftHBaseServiceHandler {
}
}
- @Test
+ @Test (timeout=180000)
public void testExists() throws TIOError, TException {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testExists".getBytes();
@@ -183,7 +183,7 @@ public class TestThriftHBaseServiceHandler {
assertTrue(handler.exists(table, get));
}
- @Test
+ @Test (timeout=180000)
public void testPutGet() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testPutGet".getBytes();
@@ -206,7 +206,7 @@ public class TestThriftHBaseServiceHandler {
assertTColumnValuesEqual(columnValues, returnedColumnValues);
}
- @Test
+ @Test (timeout=180000)
public void testPutGetMultiple() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -236,7 +236,7 @@ public class TestThriftHBaseServiceHandler {
assertTColumnValuesEqual(columnValues, results.get(1).getColumnValues());
}
- @Test
+ @Test (timeout=180000)
public void testDeleteMultiple() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -264,7 +264,7 @@ public class TestThriftHBaseServiceHandler {
assertFalse(handler.exists(table, new TGet(wrap(rowName2))));
}
- @Test
+ @Test (timeout=180000)
public void testDelete() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testDelete".getBytes();
@@ -301,7 +301,7 @@ public class TestThriftHBaseServiceHandler {
assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
}
- @Test
+ @Test (timeout=180000)
public void testDeleteAllTimestamps() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testDeleteAllTimestamps".getBytes();
@@ -341,7 +341,7 @@ public class TestThriftHBaseServiceHandler {
assertEquals(0, result.getColumnValuesSize());
}
- @Test
+ @Test (timeout=180000)
public void testDeleteSingleTimestamp() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testDeleteSingleTimestamp".getBytes();
@@ -386,7 +386,7 @@ public class TestThriftHBaseServiceHandler {
assertEquals(timestamp1, result.getColumnValues().get(0).getTimestamp());
}
- @Test
+ @Test (timeout=180000)
public void testIncrement() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testIncrement".getBytes();
@@ -413,7 +413,7 @@ public class TestThriftHBaseServiceHandler {
assertArrayEquals(Bytes.toBytes(2L), columnValue.getValue());
}
- @Test
+ @Test (timeout=180000)
public void testAppend() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testAppend".getBytes();
@@ -446,7 +446,7 @@ public class TestThriftHBaseServiceHandler {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testCheckAndPut() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testCheckAndPut".getBytes();
@@ -493,7 +493,7 @@ public class TestThriftHBaseServiceHandler {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testCheckAndDelete() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testCheckAndDelete".getBytes();
@@ -536,7 +536,7 @@ public class TestThriftHBaseServiceHandler {
assertEquals(0, result.getColumnValuesSize());
}
- @Test
+ @Test (timeout=180000)
public void testScan() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -584,7 +584,7 @@ public class TestThriftHBaseServiceHandler {
}
}
- @Test
+ @Test (timeout=180000)
public void testReverseScan() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -633,7 +633,7 @@ public class TestThriftHBaseServiceHandler {
}
}
- @Test
+ @Test (timeout=180000)
public void testScanWithFilter() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -698,7 +698,7 @@ public class TestThriftHBaseServiceHandler {
return res;
}
- @Test
+ @Test (timeout=180000)
public void testScanWithBatchSize() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -757,7 +757,7 @@ public class TestThriftHBaseServiceHandler {
}
}
- @Test
+ @Test (timeout=180000)
public void testGetScannerResults() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -828,7 +828,7 @@ public class TestThriftHBaseServiceHandler {
}
}
- @Test
+ @Test (timeout=180000)
public void testFilterRegistration() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf.set("hbase.thrift.filters", "MyFilter:filterclass");
@@ -837,7 +837,7 @@ public class TestThriftHBaseServiceHandler {
assertEquals("filterclass", registeredFilters.get("MyFilter"));
}
- @Test
+ @Test (timeout=180000)
public void testMetrics() throws Exception {
Configuration conf = UTIL.getConfiguration();
ThriftMetrics metrics = getMetrics(conf);
@@ -869,7 +869,7 @@ public class TestThriftHBaseServiceHandler {
return m;
}
- @Test
+ @Test (timeout=180000)
public void testAttribute() throws Exception {
byte[] rowName = "testAttribute".getBytes();
byte[] attributeKey = "attribute1".getBytes();
@@ -913,7 +913,7 @@ public class TestThriftHBaseServiceHandler {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testMutateRow() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testMutateRow".getBytes();
@@ -977,7 +977,7 @@ public class TestThriftHBaseServiceHandler {
*
* @throws Exception
*/
- @Test
+ @Test (timeout=180000)
public void testDurability() throws Exception {
byte[] rowName = "testDurability".getBytes();
List columnValues = new ArrayList();
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
index 42d1b08..8863833 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java
@@ -187,7 +187,7 @@ private ThriftHBaseServiceHandler createHandler() throws IOException {
return new ThriftHBaseServiceHandler(conf, UserProvider.instantiate(conf));
}
-@Test
+@Test (timeout=180000)
public void testScanWithVisibilityLabels() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -255,7 +255,7 @@ public void testScanWithVisibilityLabels() throws Exception {
}
}
-@Test
+@Test (timeout=180000)
public void testGetScannerResultsWithAuthorizations() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
ByteBuffer table = wrap(tableAname);
@@ -313,7 +313,7 @@ public void testGetScannerResultsWithAuthorizations() throws Exception {
}
}
-@Test
+@Test (timeout=180000)
public void testGetsWithLabels() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testPutGet".getBytes();
@@ -343,7 +343,7 @@ public void testGetsWithLabels() throws Exception {
assertTColumnValuesEqual(columnValues, returnedColumnValues);
}
-@Test
+@Test (timeout=180000)
public void testIncrementWithTags() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testIncrementWithTags".getBytes();
@@ -378,7 +378,7 @@ public void testIncrementWithTags() throws Exception {
assertArrayEquals(Bytes.toBytes(2L), columnValue.getValue());
}
-@Test
+@Test (timeout=180000)
public void testIncrementWithTagsWithNotMatchLabels() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testIncrementWithTagsWithNotMatchLabels".getBytes();
@@ -409,7 +409,7 @@ public void testIncrementWithTagsWithNotMatchLabels() throws Exception {
assertNull(result.getRow());
}
-@Test
+@Test (timeout=180000)
public void testAppend() throws Exception {
ThriftHBaseServiceHandler handler = createHandler();
byte[] rowName = "testAppend".getBytes();