diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index aa3b6dd..38e5b59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -141,6 +141,13 @@ public class TestHFileOutputFormat {
 
       Random random = new Random();
       for (int i = 0; i < ROWSPERSPLIT; i++) {
 
+        long ts = HConstants.LATEST_TIMESTAMP;
+        boolean writeDelete = false;
+
+        if (0 == i % 100) {
+          writeDelete = true;
+          ts = System.currentTimeMillis();
+        }
         random.nextBytes(keyBytes);
         // Ensure that unique tasks generate unique keys
@@ -150,7 +157,14 @@ public class TestHFileOutputFormat {
 
         for (byte[] family : TestHFileOutputFormat.FAMILIES) {
           KeyValue kv = new KeyValue(keyBytes, family,
-              PerformanceEvaluation.QUALIFIER_NAME, valBytes);
+              PerformanceEvaluation.QUALIFIER_NAME, ts, valBytes);
+          context.write(key, kv);
+        }
+
+        // also write a deletion marker every 100 records
+        if (writeDelete) {
+          KeyValue kv = new KeyValue(keyBytes, TestHFileOutputFormat.FAMILIES[0],
+              PerformanceEvaluation.QUALIFIER_NAME, ts, KeyValue.Type.DeleteColumn);
           context.write(key, kv);
         }
       }
@@ -422,7 +436,8 @@ public class TestHFileOutputFormat {
     Scan scan = new Scan();
     ResultScanner results = table.getScanner(scan);
     for (Result res : results) {
-      assertEquals(FAMILIES.length, res.raw().length);
+      assertTrue(FAMILIES.length == res.raw().length
+          || FAMILIES.length - 1 == res.raw().length);
       KeyValue first = res.raw()[0];
       for (KeyValue kv : res.raw()) {
         assertTrue(KeyValue.COMPARATOR.matchingRows(first, kv));
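
Note (not part of the patch): the mapper now emits a KeyValue.Type.DeleteColumn marker for FAMILIES[0] every 100 rows, timestamped with System.currentTimeMillis(). A DeleteColumn marker masks all versions of that column at or before its timestamp, so the scan in the second hunk may see either FAMILIES.length cells per row or FAMILIES.length - 1 when the put in the first family is shadowed; the relaxed assertTrue accepts both. Below is a minimal standalone sketch of the two KeyValue constructors the patch exercises; the class name and the row/family/qualifier literals are illustrative only, not from the patch:

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DeleteMarkerSketch {
      public static void main(String[] args) {
        byte[] row = Bytes.toBytes("row-0");
        byte[] family = Bytes.toBytes("info");
        byte[] qualifier = Bytes.toBytes("data");

        // Put-style KeyValue carrying a value; LATEST_TIMESTAMP defers
        // timestamp assignment to the server at write time.
        KeyValue put = new KeyValue(row, family, qualifier,
            HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));

        // DeleteColumn marker for the same cell; a delete marker needs a
        // concrete timestamp so it shadows versions written at or before it.
        long ts = System.currentTimeMillis();
        KeyValue delete = new KeyValue(row, family, qualifier, ts,
            KeyValue.Type.DeleteColumn);

        System.out.println("put    isDelete: " + put.isDelete());    // false
        System.out.println("delete isDelete: " + delete.isDelete()); // true
      }
    }

This mirrors why the patch switches the put-side KeyValue from the implicit-timestamp constructor to the explicit-timestamp one: the put and its delete marker must share a timestamp for the masking behavior to be deterministic in the test.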