diff --git src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java index e105b77..8643359 100644 --- src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java +++ src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java @@ -24,7 +24,22 @@ package org.apache.hadoop.hbase.util; */ public class IncrementingEnvironmentEdge implements EnvironmentEdge { - private long timeIncrement = 1; + private long timeIncrement; + + /** + * Construct an incremental edge starting from 1 + */ + public IncrementingEnvironmentEdge() { + this(1); + } + + /** + * Construct an incremental edge with an initial amount + * @param initialAmount the initial value to start with + */ + public IncrementingEnvironmentEdge(long initialAmount) { + this.timeIncrement = initialAmount; + } /** * {@inheritDoc} @@ -36,4 +51,12 @@ public class IncrementingEnvironmentEdge implements EnvironmentEdge { public synchronized long currentTimeMillis() { return timeIncrement++; } + + /** + * Increment the time by the given amount + */ + public synchronized long incrementTime(long amount) { + timeIncrement += amount; + return timeIncrement; + } } diff --git src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java index 7a118da..e9cbd4f 100644 --- src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java +++ src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java @@ -19,31 +19,35 @@ */ package org.apache.hadoop.hbase.coprocessor; +import static junit.framework.Assert.assertEquals; + import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseConfiguration; +import 
org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; -import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.coprocessor.ObserverContext; -import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.junit.AfterClass; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import static org.junit.Assert.assertEquals; - @Category(MediumTests.class) public class TestRegionObserverBypass { private static HBaseTestingUtility util; @@ -61,7 +65,6 @@ public class TestRegionObserverBypass { TestCoprocessor.class.getName()); util = new HBaseTestingUtility(conf); util.startMiniCluster(); - util.createTable(tableName, new byte[][] {dummy, test}); } @AfterClass @@ -69,6 +72,18 @@ public class TestRegionObserverBypass { util.shutdownMiniCluster(); } + @Before + public void setUp() throws Exception { + HBaseAdmin admin = util.getHBaseAdmin(); + if (admin.tableExists(tableName)) { + if (admin.isTableEnabled(tableName)) { + admin.disableTable(tableName); + } + admin.deleteTable(tableName); + } + util.createTable(tableName, new byte[][] {dummy, test}); + } + /** * do a single put that is bypassed by a RegionObserver * @throws Exception @@ -90,6 
+105,10 @@ public class TestRegionObserverBypass { */ @Test public void testMulti() throws Exception { + //ensure that server time increments every time we do an operation, otherwise + //previous deletes will eclipse successive puts having the same timestamp + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); + HTable t = new HTable(util.getConfiguration(), tableName); List puts = new ArrayList(); Put p = new Put(row1); @@ -171,6 +190,8 @@ public class TestRegionObserverBypass { checkRowAndDelete(t,row2,1); checkRowAndDelete(t,row3,0); t.close(); + + EnvironmentEdgeManager.reset(); } private void checkRowAndDelete(HTable t, byte[] row, int count) throws IOException { diff --git src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java index 44aa3e4..bec82a7 100644 --- src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java +++ src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -66,7 +67,7 @@ public class TestScannerSelectionUsingTTL { private static String FAMILY = "myCF"; private static byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY); - private static final int TTL_SECONDS = 2; + private static final int TTL_SECONDS = 10; private static final int TTL_MS = TTL_SECONDS * 1000; private static final int NUM_EXPIRED_FILES = 2; @@ -111,20 +112,24 @@ public class TestScannerSelectionUsingTTL { HRegion.createHRegion(info, 
TEST_UTIL.getClusterTestDir(), conf, htd); + long ts = EnvironmentEdgeManager.currentTimeMillis(); + long version = 0; //make sure each new set of Put's have a new ts for (int iFile = 0; iFile < totalNumFiles; ++iFile) { if (iFile == NUM_EXPIRED_FILES) { Threads.sleepWithoutInterrupt(TTL_MS); + version += TTL_MS; } for (int iRow = 0; iRow < NUM_ROWS; ++iRow) { Put put = new Put(Bytes.toBytes("row" + iRow)); for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) { put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol), - Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol)); + ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol)); } region.put(put); } region.flushcache(); + version++; } Scan scan = new Scan(); diff --git src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java index ffda68d..01b99ae 100644 --- src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java +++ src/test/java/org/apache/hadoop/hbase/master/TestLogsCleaner.java @@ -64,7 +64,7 @@ public class TestLogsCleaner { public void testLogCleaning() throws Exception{ Configuration conf = TEST_UTIL.getConfiguration(); // set TTL - long ttl = 2000; + long ttl = 10000; conf.setLong("hbase.master.logcleaner.ttl", ttl); conf.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true); Replication.decorateMasterConfiguration(conf); diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java index ae5da03..d094295 100644 --- src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java +++ src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java @@ -21,13 +21,21 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.HBaseTestCase; +import 
org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.junit.experimental.categories.Category; @Category(SmallTests.class) @@ -43,6 +51,27 @@ public class TestKeepDeletes extends HBaseTestCase { private final byte[] c0 = COLUMNS[0]; private final byte[] c1 = COLUMNS[1]; + @Override + protected void setUp() throws Exception { + super.setUp(); + /* HBASE-6832: [WINDOWS] Tests should use explicit timestamp for Puts, and not rely on + * implicit RS timing. + * Use an explicit timer (IncrementingEnvironmentEdge) so that the put, delete + * compact timestamps are tracked. Otherwise, forced major compaction will not purge + * Delete's having the same timestamp. see ScanQueryMatcher.match(): + * if (retainDeletesInOutput + * || (!isUserScan && (EnvironmentEdgeManager.currentTimeMillis() - timestamp) + * <= timeToPurgeDeletes) ... ) + */ + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge(3000)); + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + EnvironmentEdgeManager.reset(); + } + /** * Make sure that deleted rows are retained. * Family delete markers are deleted. 
@@ -55,7 +84,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -139,7 +168,7 @@ public class TestKeepDeletes extends HBaseTestCase { } /** - * Even when the store does not keep deletes a "raw" scan will + * Even when the store does not keep deletes a "raw" scan will * return everything it can find (unless discarding cells is guaranteed * to have no effect). * Assuming this the desired behavior. Could also disallow "raw" scanning @@ -152,7 +181,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, false); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -197,7 +226,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, false); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -245,7 +274,7 @@ public class TestKeepDeletes extends HBaseTestCase { s.setRaw(true); s.setMaxVersions(); s.addColumn(c0, c0); - + try { InternalScanner scan = region.getScanner(s); fail("raw scanner with columns should have failed"); @@ -265,7 +294,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -312,7 +341,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long 
ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Delete d = new Delete(T1, ts, null); d.deleteColumns(c0, c0, ts); @@ -325,7 +354,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts, null); d.deleteColumn(c0, c0, ts+1); region.delete(d, null, true); - + d = new Delete(T1, ts, null); d.deleteColumn(c0, c0, ts+2); region.delete(d, null, true); @@ -355,7 +384,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); @@ -378,7 +407,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts, null); d.deleteColumn(c0, c0, ts+1); region.delete(d, null, true); - + d = new Delete(T1, ts, null); d.deleteColumn(c0, c0, ts+2); region.delete(d, null, true); @@ -418,7 +447,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); p.add(c0, c1, T1); @@ -500,7 +529,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); region.put(p); @@ -510,7 +539,7 @@ public class TestKeepDeletes extends HBaseTestCase { p = new Put(T1, ts-10); p.add(c0, c1, T1); region.put(p); - + Delete d = new Delete(T1, ts, null); // test corner case (Put and Delete have same TS) d.deleteColumns(c0, c0, ts); @@ -519,7 +548,7 @@ public class TestKeepDeletes extends HBaseTestCase { d = new Delete(T1, ts+1, null); d.deleteColumn(c0, c0, ts+1); region.delete(d, null, true); - + 
d = new Delete(T1, ts+3, null); d.deleteColumn(c0, c0, ts+3); region.delete(d, null, true); @@ -535,7 +564,7 @@ public class TestKeepDeletes extends HBaseTestCase { p = new Put(T1, ts+2); p.add(c0, c0, T2); region.put(p); - + // delete, put, delete, delete, put assertEquals(3, countDeleteMarkers(region)); @@ -593,7 +622,7 @@ public class TestKeepDeletes extends HBaseTestCase { HConstants.FOREVER, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis(); + long ts = EnvironmentEdgeManager.currentTimeMillis(); Put p = new Put(T1, ts); p.add(c0, c0, T1); @@ -644,7 +673,7 @@ public class TestKeepDeletes extends HBaseTestCase { HTableDescriptor htd = createTableDescriptor(getName(), 3, 1000, 1, true); HRegion region = createNewHRegion(htd, null, null); - long ts = System.currentTimeMillis() - 2000; // 2s in the past + long ts = EnvironmentEdgeManager.currentTimeMillis() - 2000; // 2s in the past Put p = new Put(T1, ts); p.add(c0, c0, T3); diff --git src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java index 121d277..ec0e18b 100644 --- src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java +++ src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.apache.hadoop.hbase.util.ManualEnvironmentEdge; import org.apache.hadoop.util.Progressable; import org.junit.experimental.categories.Category; @@ -153,6 +154,8 @@ public class TestStore extends TestCase { public void testDeleteExpiredStoreFiles() throws Exception { int storeFileNum = 4; int ttl = 4; + IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(); + 
EnvironmentEdgeManagerTestHelper.injectEdge(edge); Configuration conf = HBaseConfiguration.create(); // Enable the expired store file deletion @@ -172,7 +175,7 @@ public class TestStore extends TestCase { this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null)); this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null)); flush(i); - Thread.sleep(sleepTime); + edge.incrementTime(sleepTime); } // Verify the total number of store files @@ -185,13 +188,13 @@ public class TestStore extends TestCase { CompactionRequest cr = this.store.requestCompaction(); assertEquals(1, cr.getFiles().size()); assertTrue(cr.getFiles().get(0).getReader().getMaxTimestamp() < - (System.currentTimeMillis() - this.store.scanInfo.getTtl())); + (EnvironmentEdgeManager.currentTimeMillis() - this.store.scanInfo.getTtl())); // Verify that the expired the store has been deleted. this.store.compact(cr); assertEquals(storeFileNum - i, this.store.getStorefiles().size()); // Let the next store file expired. 
- Thread.sleep(sleepTime); + edge.incrementTime(sleepTime); } } diff --git src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java index f3df9ed..49eb3bd 100644 --- src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java +++ src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java @@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.net.InetAddress; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -33,6 +32,9 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType; import org.apache.hadoop.hbase.thrift.generated.Hbase; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; +import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge; import org.apache.hadoop.hbase.util.Threads; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; @@ -112,11 +114,15 @@ public class TestThriftServerCmdLine { @BeforeClass public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); + //ensure that server time increments every time we do an operation, otherwise + //successive puts having the same timestamp will override each other + EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge()); } @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); + EnvironmentEdgeManager.reset(); } private void startCmdLineThread(final String[] args) {