diff --git dev-support/hbasetests.sh dev-support/hbasetests.sh
index de12c1b..e129bd4 100755
--- dev-support/hbasetests.sh
+++ dev-support/hbasetests.sh
@@ -307,11 +307,11 @@ echo "Small tests executed after $exeTime minutes"
if (test $parallelMaven -gt 0)
then
echo "Running tests with two maven instances in parallel"
- $mvnCommand -P nonParallelTests test -Dtest=$runList1 $args &
+ $mvnCommand -P localTests test -Dtest=$runList1 $args &
#give some time to the first process if there is anything to compile
sleep 30
- $mvnCommand -P nonParallelTests test -Dtest=$runList2 $args
+ $mvnCommand -P localTests test -Dtest=$runList2 $args
#wait for forked process to finish
wait
@@ -326,14 +326,14 @@ then
if (test $runAllTests -eq 1 && test ${#flakyTests} -gt 5)
then
echo "Running flaky tests"
- $mvnCommand -P nonParallelTests test -Dtest=$flakyTests $args
+ $mvnCommand -P localTests test -Dtest=$flakyTests $args
cleanProcess
exeTime=$(((`date +%s` - $startTime)/60))
echo "Flaky tests executed after $exeTime minutes"
fi
else
echo "Running tests with a single maven instance, no parallelization"
- $mvnCommand -P nonParallelTests test -Dtest=$runList1,$runList2,$flakyTests $args
+ $mvnCommand -P localTests test -Dtest=$runList1,$runList2,$flakyTests $args
cleanProcess
exeTime=$(((`date +%s` - $startTime)/60))
echo "Single maven instance tests executed after $exeTime minutes"
@@ -420,7 +420,7 @@ then
if (test $replayFailed -gt 0)
then
echo "Replaying all tests that failed"
- $mvnCommand -P nonParallelTests test -Dtest=$replayList $args
+ $mvnCommand -P localTests test -Dtest=$replayList $args
echo "Replaying done"
fi
fi
diff --git pom.xml pom.xml
index 10dc81d..097dc33 100644
--- pom.xml
+++ pom.xml
@@ -329,9 +329,7 @@
-->
org.apache.maven.surefire
- surefire-junit47
-
+ ${surefire.provider}
${surefire.version}
@@ -351,7 +349,23 @@
org.apache.maven.plugins
maven-failsafe-plugin
${surefire.version}
+
+
+ org.apache.maven.surefire
+ ${surefire.provider}
+ ${surefire.version}
+
+
+
+ ${integrationtest.include}
+
+
+ ${unittest.include}
+ **/*$*
+ ${test.exclude.pattern}
+
+ true
${env.LD_LIBRARY_PATH}:${project.build.directory}/nativelib
${env.DYLD_LIBRARY_PATH}:${project.build.directory}/nativelib
@@ -605,13 +619,13 @@
org.apache.maven.plugins
maven-surefire-plugin
- ${surefire.skipfirstPartTests}
+ ${surefire.skipFirstPart}
${surefire.firstPartForkMode}
${surefire.firstPartParallel}
false
${surefire.firstPartThreadCount}
${surefire.firstPartGroups}
- ${surefire.hasSecondPart}
+ false
@@ -634,8 +648,7 @@
maven-failsafe-plugin
false
- always
- org.apache.hadoop.hbase.LargeTests
+ always
@@ -872,22 +885,24 @@
0.91.0
${project.artifactId}-${project.version}
+
+
+ **/Test*.java
+ **/IntegrationTest*.java
2.11-TRUNK-HBASE-2
-
- always
- none
- 1
+ surefire-junit47
-
+
false
- true
- false
+ false
+
+ once
+ none
+ 1
-
-
+ org.apache.hadoop.hbase.SmallTests
+ org.apache.hadoop.hbase.MediumTests
@@ -901,6 +916,7 @@
* javax.xml.stream:stax-api in favour of stax:stax-api
-->
+
com.google.guava
@@ -1886,6 +1902,10 @@
once
none
1
+
+ false
+ true
+
@@ -1912,6 +1932,7 @@
false
+ always
false
true
org.apache.hadoop.hbase.MediumTests
@@ -1925,6 +1946,7 @@
false
+ always
false
true
org.apache.hadoop.hbase.LargeTests
@@ -1944,11 +1966,10 @@
false
false
- true
org.apache.hadoop.hbase.SmallTests
org.apache.hadoop.hbase.MediumTests
-
+
runAllTests
@@ -1962,11 +1983,37 @@
false
false
- true
org.apache.hadoop.hbase.SmallTests
org.apache.hadoop.hbase.MediumTests,org.apache.hadoop.hbase.LargeTests
-
+
+
+
+ skipSurefireTests
+
+ false
+
+
+ true
+ true
+
+
+
+
+ localTests
+
+ false
+
+
+ surefire-junit4
+ 2.10
+
+ always
+ false
+ true
+
+
+
diff --git src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 9d26994..96e3049 100644
--- src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.*;
import org.apache.hadoop.hdfs.MiniDFSCluster;
/**
diff --git src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
index a081b6d..a80bd48 100644
--- src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
+++ src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
@@ -97,6 +97,9 @@ public class TestColumnPrefixFilter {
while(scanner.next(results));
assertEquals(prefixMap.get(s).size(), results.size());
}
+
+ region.close();
+ region.getLog().closeAndDelete();
}
@Test
@@ -157,6 +160,9 @@ public class TestColumnPrefixFilter {
while(scanner.next(results));
assertEquals(prefixMap.get(s).size(), results.size());
}
+
+ region.close();
+ region.getLog().closeAndDelete();
}
List<String> generateRandomWords(int numberOfWords, String suffix) {
diff --git src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
index 59cb5fa..3afcd06 100644
--- src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
+++ src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
@@ -83,6 +83,7 @@ public class TestDependentColumnFilter extends TestCase {
protected void tearDown() throws Exception {
super.tearDown();
this.region.close();
+ region.getLog().closeAndDelete();
}
private void addData() throws IOException {
diff --git src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index a2ef1b5..36c8409 100644
--- src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
@@ -163,7 +164,9 @@ public class TestFilter extends HBaseTestCase {
}
protected void tearDown() throws Exception {
- this.region.close();
+ HLog hlog = region.getLog();
+ region.close();
+ hlog.closeAndDelete();
super.tearDown();
}
diff --git src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
index 9c39e32..b7f5093 100644
--- src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
+++ src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
@@ -99,6 +99,9 @@ public class TestMultipleColumnPrefixFilter {
InternalScanner scanner = region.getScanner(scan);
while(scanner.next(results));
assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
+
+ region.close();
+ region.getLog().closeAndDelete();
}
@Test
@@ -165,6 +168,9 @@ public class TestMultipleColumnPrefixFilter {
InternalScanner scanner = region.getScanner(scan);
while(scanner.next(results));
assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
+
+ region.close();
+ region.getLog().closeAndDelete();
}
@Test
@@ -218,6 +224,9 @@ public class TestMultipleColumnPrefixFilter {
while(scanner2.next(results2));
assertEquals(results1.size(), results2.size());
+
+ region.close();
+ region.getLog().closeAndDelete();
}
List<String> generateRandomWords(int numberOfWords, String suffix) {
diff --git src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
index 56a4e89..3141f9b 100644
--- src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
+++ src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
@@ -30,9 +30,9 @@ import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
+import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
* be delayed. Check that the last two, which are undelayed, return before the
* first one.
*/
-@Category(SmallTests.class)
+@Category(MediumTests.class) // Fails sometimes with small tests
public class TestDelayedRpc {
public static RpcServer rpcServer;
@@ -233,11 +233,15 @@ public class TestDelayedRpc {
@Override
public void run() {
- Integer result = new Integer(server.test(delay));
- if (results != null) {
- synchronized (results) {
- results.add(result);
+ try {
+ Integer result = new Integer(server.test(delay));
+ if (results != null) {
+ synchronized (results) {
+ results.add(result);
+ }
}
+ } catch (Exception e) {
+ fail("Unexpected exception: "+e.getMessage());
}
}
}
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index 592bbcc..9a3e06b 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -157,6 +157,9 @@ public class TestColumnSeeking {
assertEquals(kvSet.size(), results.size());
assertTrue(results.containsAll(kvSet));
}
+
+ region.close();
+ region.getLog().closeAndDelete();
}
@SuppressWarnings("unchecked")
@@ -263,6 +266,9 @@ public class TestColumnSeeking {
assertEquals(kvSet.size(), results.size());
assertTrue(results.containsAll(kvSet));
}
+
+ region.close();
+ region.getLog().closeAndDelete();
}
List<String> generateRandomWords(int numberOfWords, String suffix) {
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 7281d64..0f8e380 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1339,6 +1339,7 @@ public class TestHRegion extends HBaseTestCase {
for (int i = 0; i < subregions.length; i++) {
try {
subregions[i].close();
+ subregions[i].getLog().closeAndDelete();
} catch (IOException e) {
// Ignore.
}
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index 0bd0f1f..0ae2960 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -133,6 +133,9 @@ public class TestKeepDeletes extends HBaseTestCase {
r = region.get(g, null);
checkResult(r, c0, c0, T1);
assertEquals(0, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -180,6 +183,9 @@ public class TestKeepDeletes extends HBaseTestCase {
kvs = new ArrayList<KeyValue>();
scan.next(kvs);
assertTrue(kvs.isEmpty());
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -222,6 +228,9 @@ public class TestKeepDeletes extends HBaseTestCase {
region.compactStores(true);
// major compaction deleted it
assertEquals(0, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -243,6 +252,9 @@ public class TestKeepDeletes extends HBaseTestCase {
} catch (DoNotRetryIOException dnre) {
// ok!
}
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -287,6 +299,9 @@ public class TestKeepDeletes extends HBaseTestCase {
assertTrue(kvs.get(3).isDeleteType());
assertEquals(kvs.get(4).getValue(), T2);
assertEquals(kvs.get(5).getValue(), T1);
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -327,6 +342,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// major compaction removes all, since there are no puts they affect
region.compactStores(true);
assertEquals(0, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -387,6 +405,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// so after this collection all markers
region.compactStores(true);
assertEquals(0, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -464,6 +485,9 @@ public class TestKeepDeletes extends HBaseTestCase {
checkGet(region, T2, c0, c1, ts+3);
checkGet(region, T2, c1, c0, ts+3, T2, T1);
checkGet(region, T2, c1, c1, ts+3, T2, T1);
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -556,6 +580,9 @@ public class TestKeepDeletes extends HBaseTestCase {
region.compactStores(true);
region.compactStores(true);
assertEquals(1, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -604,6 +631,9 @@ public class TestKeepDeletes extends HBaseTestCase {
scanner.next(kvs);
assertEquals(4, kvs.size());
scanner.close();
+
+ region.close();
+ region.getLog().closeAndDelete();
}
/**
@@ -679,6 +709,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// so after the next compaction the last family delete marker is also gone
region.compactStores(true);
assertEquals(0, countDeleteMarkers(region));
+
+ region.close();
+ region.getLog().closeAndDelete();
}
private void checkGet(HRegion region, byte[] row, byte[] fam, byte[] col,
@@ -689,7 +722,7 @@ public class TestKeepDeletes extends HBaseTestCase {
g.setTimeRange(0L, time);
Result r = region.get(g, null);
checkResult(r, fam, col, vals);
-
+
}
private int countDeleteMarkers(HRegion region) throws IOException {
@@ -707,7 +740,7 @@ public class TestKeepDeletes extends HBaseTestCase {
}
kvs.clear();
} while (hasMore);
- scan.close();
+ scan.close();
return res;
}
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index d0e3d13..a2a70e0 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -308,6 +308,7 @@ public class TestMultiColumnScanner {
LOG.info("Number of row/col pairs deleted at least once: " +
lastDelTimeMap.size());
region.close();
+ region.getLog().closeAndDelete();
}
static HRegion createRegion(String tableName,
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
index cae1819..80362c9 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
@@ -94,5 +94,7 @@ public class TestResettingCounters {
assertTrue(Bytes.equals(kvs[i].getQualifier(), qualifiers[i]));
assertEquals(6, Bytes.toLong(kvs[i].getValue()));
}
+ region.close();
+ region.getLog().closeAndDelete();
}
}
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
index b1a8b2d..d3b355e 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
@@ -104,6 +104,7 @@ public class TestScanWithBloomError {
scanColSet(new int[]{1, 4, 6, 7}, new int[]{1, 6, 7});
region.close();
+ region.getLog().closeAndDelete();
}
private void scanColSet(int[] colSet, int[] expectedResultCols)
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index dad5922..dc96c45 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -427,6 +427,7 @@ public class TestSeekOptimizations {
public void tearDown() throws IOException {
if (region != null) {
region.close();
+ region.getLog().closeAndDelete();
}
// We have to re-set the lazy seek flag back to the default so that other
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
index 8bf9081..e1aed35 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
@@ -200,6 +200,7 @@ public class TestSplitTransaction {
daughtersRowCount += count;
} finally {
openRegion.close();
+ openRegion.getLog().closeAndDelete();
}
}
assertEquals(rowcount, daughtersRowCount);
@@ -255,6 +256,7 @@ public class TestSplitTransaction {
daughtersRowCount += count;
} finally {
openRegion.close();
+ openRegion.getLog().closeAndDelete();
}
}
assertEquals(rowcount, daughtersRowCount);
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 5113c1f..92ab06c 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -388,6 +388,12 @@ public class TestStore extends TestCase {
assertEquals(oldValue, Bytes.toLong(results.get(1).getValue()));
}
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ EnvironmentEdgeManagerTestHelper.reset();
+ }
+
public void testICV_negMemstoreSize() throws IOException {
init(this.getName());
diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index d0cda62..07163f2 100644
--- src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -41,7 +41,8 @@ import java.util.NavigableSet;
import java.util.TreeSet;
import static org.apache.hadoop.hbase.regionserver.KeyValueScanFixture.scanFixture;
-@Category(SmallTests.class)
+// Can't be small as it plays with EnvironmentEdgeManager
+@Category(MediumTests.class)
public class TestStoreScanner extends TestCase {
private static final String CF_STR = "cf";
final byte [] CF = Bytes.toBytes(CF_STR);
@@ -502,69 +503,73 @@ public class TestStoreScanner extends TestCase {
}
public void testDeleteMarkerLongevity() throws Exception {
- final long now = System.currentTimeMillis();
- EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
- public long currentTimeMillis() {
- return now;
- }
- });
- KeyValue [] kvs = new KeyValue[] {
+ try {
+ final long now = System.currentTimeMillis();
+ EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
+ public long currentTimeMillis() {
+ return now;
+ }
+ });
+ KeyValue[] kvs = new KeyValue[]{
/*0*/ new KeyValue(Bytes.toBytes("R1"), Bytes.toBytes("cf"), null,
- now-100, KeyValue.Type.DeleteFamily), // live
+ now - 100, KeyValue.Type.DeleteFamily), // live
/*1*/ new KeyValue(Bytes.toBytes("R1"), Bytes.toBytes("cf"), null,
- now-1000, KeyValue.Type.DeleteFamily), // expired
- /*2*/ KeyValueTestUtil.create("R1", "cf", "a", now-50,
- KeyValue.Type.Put, "v3"), // live
- /*3*/ KeyValueTestUtil.create("R1", "cf", "a", now-55,
- KeyValue.Type.Delete, "dontcare"), // live
- /*4*/ KeyValueTestUtil.create("R1", "cf", "a", now-55,
- KeyValue.Type.Put, "deleted-version v2"), // deleted
- /*5*/ KeyValueTestUtil.create("R1", "cf", "a", now-60,
- KeyValue.Type.Put, "v1"), // live
- /*6*/ KeyValueTestUtil.create("R1", "cf", "a", now-65,
- KeyValue.Type.Put, "v0"), // max-version reached
+ now - 1000, KeyValue.Type.DeleteFamily), // expired
+ /*2*/ KeyValueTestUtil.create("R1", "cf", "a", now - 50,
+ KeyValue.Type.Put, "v3"), // live
+ /*3*/ KeyValueTestUtil.create("R1", "cf", "a", now - 55,
+ KeyValue.Type.Delete, "dontcare"), // live
+ /*4*/ KeyValueTestUtil.create("R1", "cf", "a", now - 55,
+ KeyValue.Type.Put, "deleted-version v2"), // deleted
+ /*5*/ KeyValueTestUtil.create("R1", "cf", "a", now - 60,
+ KeyValue.Type.Put, "v1"), // live
+ /*6*/ KeyValueTestUtil.create("R1", "cf", "a", now - 65,
+ KeyValue.Type.Put, "v0"), // max-version reached
/*7*/ KeyValueTestUtil.create("R1", "cf", "a",
- now-100, KeyValue.Type.DeleteColumn, "dont-care"), // max-version
- /*8*/ KeyValueTestUtil.create("R1", "cf", "b", now-600,
- KeyValue.Type.DeleteColumn, "dont-care"), //expired
- /*9*/ KeyValueTestUtil.create("R1", "cf", "b", now-70,
- KeyValue.Type.Put, "v2"), //live
- /*10*/ KeyValueTestUtil.create("R1", "cf", "b", now-750,
- KeyValue.Type.Put, "v1"), //expired
- /*11*/ KeyValueTestUtil.create("R1", "cf", "c", now-500,
- KeyValue.Type.Delete, "dontcare"), //expired
- /*12*/ KeyValueTestUtil.create("R1", "cf", "c", now-600,
- KeyValue.Type.Put, "v1"), //expired
- /*13*/ KeyValueTestUtil.create("R1", "cf", "c", now-1000,
- KeyValue.Type.Delete, "dontcare"), //expired
- /*14*/ KeyValueTestUtil.create("R1", "cf", "d", now-60,
- KeyValue.Type.Put, "expired put"), //live
- /*15*/ KeyValueTestUtil.create("R1", "cf", "d", now-100,
- KeyValue.Type.Delete, "not-expired delete"), //live
- };
- List<KeyValueScanner> scanners = scanFixture(kvs);
- Scan scan = new Scan();
- scan.setMaxVersions(2);
- Store.ScanInfo scanInfo = new Store.ScanInfo(Bytes.toBytes("cf"),
+ now - 100, KeyValue.Type.DeleteColumn, "dont-care"), // max-version
+ /*8*/ KeyValueTestUtil.create("R1", "cf", "b", now - 600,
+ KeyValue.Type.DeleteColumn, "dont-care"), //expired
+ /*9*/ KeyValueTestUtil.create("R1", "cf", "b", now - 70,
+ KeyValue.Type.Put, "v2"), //live
+ /*10*/ KeyValueTestUtil.create("R1", "cf", "b", now - 750,
+ KeyValue.Type.Put, "v1"), //expired
+ /*11*/ KeyValueTestUtil.create("R1", "cf", "c", now - 500,
+ KeyValue.Type.Delete, "dontcare"), //expired
+ /*12*/ KeyValueTestUtil.create("R1", "cf", "c", now - 600,
+ KeyValue.Type.Put, "v1"), //expired
+ /*13*/ KeyValueTestUtil.create("R1", "cf", "c", now - 1000,
+ KeyValue.Type.Delete, "dontcare"), //expired
+ /*14*/ KeyValueTestUtil.create("R1", "cf", "d", now - 60,
+ KeyValue.Type.Put, "expired put"), //live
+ /*15*/ KeyValueTestUtil.create("R1", "cf", "d", now - 100,
+ KeyValue.Type.Delete, "not-expired delete"), //live
+ };
+ List<KeyValueScanner> scanners = scanFixture(kvs);
+ Scan scan = new Scan();
+ scan.setMaxVersions(2);
+ Store.ScanInfo scanInfo = new Store.ScanInfo(Bytes.toBytes("cf"),
0 /* minVersions */,
2 /* maxVersions */, 500 /* ttl */,
false /* keepDeletedCells */,
200, /* timeToPurgeDeletes */
KeyValue.COMPARATOR);
- StoreScanner scanner =
+ StoreScanner scanner =
new StoreScanner(scan, scanInfo,
- StoreScanner.ScanType.MAJOR_COMPACT, null, scanners,
- HConstants.OLDEST_TIMESTAMP);
- List<KeyValue> results = new ArrayList<KeyValue>();
- results = new ArrayList<KeyValue>();
- assertEquals(true, scanner.next(results));
- assertEquals(kvs[0], results.get(0));
- assertEquals(kvs[2], results.get(1));
- assertEquals(kvs[3], results.get(2));
- assertEquals(kvs[5], results.get(3));
- assertEquals(kvs[9], results.get(4));
- assertEquals(kvs[14], results.get(5));
- assertEquals(kvs[15], results.get(6));
- assertEquals(7, results.size());
- }
+ StoreScanner.ScanType.MAJOR_COMPACT, null, scanners,
+ HConstants.OLDEST_TIMESTAMP);
+ List<KeyValue> results = new ArrayList<KeyValue>();
+ results = new ArrayList<KeyValue>();
+ assertEquals(true, scanner.next(results));
+ assertEquals(kvs[0], results.get(0));
+ assertEquals(kvs[2], results.get(1));
+ assertEquals(kvs[3], results.get(2));
+ assertEquals(kvs[5], results.get(3));
+ assertEquals(kvs[9], results.get(4));
+ assertEquals(kvs[14], results.get(5));
+ assertEquals(kvs[15], results.get(6));
+ assertEquals(7, results.size());
+ } finally {
+ EnvironmentEdgeManagerTestHelper.reset();
+ }
+ }
}
diff --git src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index caf3d19..ccf6fa4 100644
--- src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -36,7 +36,8 @@ import org.junit.experimental.categories.Category;
/**
* Tests for {@link FSTableDescriptors}.
*/
-@Category(SmallTests.class)
+// Does not support being executed in the same JVM as other tests
+@Category(MediumTests.class)
public class TestFSTableDescriptors {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final Log LOG = LogFactory.getLog(TestFSTableDescriptors.class);
diff --git src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
index 56453c9..52fbfaa 100644
--- src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
+++ src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -29,7 +30,7 @@ import static junit.framework.Assert.assertEquals;
* Tests that the incrementing environment edge increments time instead of using
* the default.
*/
-@Category(MediumTests.class)
+@Category(SmallTests.class)
public class TestIncrementingEnvironmentEdge {
@Test