diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java index aaa50b1..6215acf 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java @@ -37,7 +37,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -255,7 +255,7 @@ public final class BackupSystemTable implements Closeable { res.advance(); byte[] row = CellUtil.cloneRow(res.listCells().get(0)); for (Cell cell : res.listCells()) { - if (CellComparator.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, + if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, BackupSystemTable.PATH_COL.length) == 0) { map.put(row, Bytes.toString(CellUtil.cloneValue(cell))); } @@ -284,13 +284,13 @@ public final class BackupSystemTable implements Closeable { byte[] fam = null; String path = null; for (Cell cell : res.listCells()) { - if (CellComparator.compareQualifiers(cell, BackupSystemTable.TBL_COL, 0, + if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.TBL_COL, 0, BackupSystemTable.TBL_COL.length) == 0) { tbl = TableName.valueOf(CellUtil.cloneValue(cell)); - } else if (CellComparator.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0, + } else if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0, BackupSystemTable.FAM_COL.length) == 0) { fam = CellUtil.cloneValue(cell); - } else if (CellComparator.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, + } else if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, BackupSystemTable.PATH_COL.length) == 0) { path = Bytes.toString(CellUtil.cloneValue(cell)); } @@ -436,13 +436,13 @@ public final class BackupSystemTable implements Closeable { rows.add(row); String rowStr = Bytes.toString(row); region = BackupSystemTable.getRegionNameFromOrigBulkLoadRow(rowStr); - if (CellComparator.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0, + if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.FAM_COL, 0, BackupSystemTable.FAM_COL.length) == 0) { fam = Bytes.toString(CellUtil.cloneValue(cell)); - } else if (CellComparator.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, + } else if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.PATH_COL, 0, BackupSystemTable.PATH_COL.length) == 0) { path = Bytes.toString(CellUtil.cloneValue(cell)); - } else if (CellComparator.compareQualifiers(cell, BackupSystemTable.STATE_COL, 0, + } else if (CellComparatorImpl.compareQualifiers(cell, BackupSystemTable.STATE_COL, 0, BackupSystemTable.STATE_COL.length) == 0) { byte[] state = CellUtil.cloneValue(cell); if (Bytes.equals(BackupSystemTable.BL_PREPARE, state)) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java index 1ff64d5..9c7fd34 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java @@ -39,7 +39,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; @@ -315,7 +315,8 @@ public final class ConnectionUtils { return result; } Cell[] rawCells = result.rawCells(); - int index = Arrays.binarySearch(rawCells, keepCellsAfter, CellComparator::compareWithoutRow); + int index = + Arrays.binarySearch(rawCells, keepCellsAfter, CellComparatorImpl.COMPARATOR::compareWithoutRow); if (index < 0) { index = -index - 1; } else { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java index 8848404..0d3e108 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java @@ -29,7 +29,7 @@ import java.util.TreeMap; import java.util.UUID; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java index 1871d17..ff0f269 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java @@ -33,7 +33,7 @@ import java.util.NavigableMap; import java.util.TreeMap; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -211,14 +211,14 @@ public class Result implements CellScannable, CellScanner { * Return the array of Cells backing this Result instance. * * The array is sorted from smallest -> largest using the - * {@link CellComparator#COMPARATOR}. + * {@link CellComparatorImpl#COMPARATOR}. * * The array only contains what your Get or Scan specifies and no more. * For example if you request column "A" 1 version you will have at most 1 * Cell in the array. If you request column "A" with 2 version you will * have at most 2 Cells, with the first one being the newer timestamp and * the second being the older timestamp (this is the sort order defined by - * {@link CellComparator#COMPARATOR}). If columns don't exist, they won't be + * {@link CellComparatorImpl#COMPARATOR}). If columns don't exist, they won't be * present in the result. Therefore if you ask for 1 version all columns, * it is safe to iterate over this array and expect to see 1 Cell for * each column and no more. @@ -244,7 +244,7 @@ public class Result implements CellScannable, CellScanner { /** * Return the Cells for the specific column. The Cells are sorted in - * the {@link CellComparator#COMPARATOR} order. That implies the first entry in + * the {@link CellComparatorImpl#COMPARATOR} order. That implies the first entry in * the list is the most recent column. 
If the query (Scan or Get) only * requested 1 version the list will contain at most 1 entry. If the column * did not exist in the result set (either the column does not exist @@ -301,7 +301,7 @@ public class Result implements CellScannable, CellScanner { qualifierNotNull, 0, qualifierNotNull.length); // pos === ( -(insertion point) - 1) - int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.COMPARATOR); + int pos = Arrays.binarySearch(kvs, searchTerm, CellComparatorImpl.COMPARATOR); // never will exact match if (pos < 0) { pos = (pos+1) * -1; @@ -346,7 +346,7 @@ public class Result implements CellScannable, CellScanner { qualifier, qoffset, qlength); // pos === ( -(insertion point) - 1) - int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.COMPARATOR); + int pos = Arrays.binarySearch(kvs, searchTerm, CellComparatorImpl.COMPARATOR); // never will exact match if (pos < 0) { pos = (pos+1) * -1; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index 1565ddd..1f4b143 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -120,7 +120,7 @@ public class ColumnPaginationFilter extends FilterBase { int cmp = 0; // Only compare if no KV's have been seen so far. 
if (count == 0) { - cmp = CellComparator.compareQualifiers(v, this.columnOffset, 0, this.columnOffset.length); + cmp = CellComparatorImpl.compareQualifiers(v, this.columnOffset, 0, this.columnOffset.length); } if (cmp < 0) { return ReturnCode.SEEK_NEXT_USING_HINT; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 425600c..1de4858 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -126,7 +126,7 @@ public class ColumnRangeFilter extends FilterBase { int cmpMin = 1; if (this.minColumn != null) { - cmpMin = CellComparator.compareQualifiers(kv, this.minColumn, 0, this.minColumn.length); + cmpMin = CellComparatorImpl.compareQualifiers(kv, this.minColumn, 0, this.minColumn.length); } if (cmpMin < 0) { @@ -141,7 +141,7 @@ public class ColumnRangeFilter extends FilterBase { return ReturnCode.INCLUDE; } - int cmpMax = CellComparator.compareQualifiers(kv, this.maxColumn, 0, this.maxColumn.length); + int cmpMax = CellComparatorImpl.compareQualifiers(kv, this.maxColumn, 0, this.maxColumn.length); if (this.maxColumnInclusive && cmpMax <= 0 || !this.maxColumnInclusive && cmpMax < 0) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index c305ffc..8db2133 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -137,7 +137,7 @@ public abstract class CompareFilter extends FilterBase { if (compareOp == CompareOp.NO_OP) { return true; } - int compareResult = CellComparator.compareRow(cell, comparator); + int compareResult = CellComparatorImpl.compareRow(cell, comparator); return compare(compareOp, compareResult); } @@ -146,7 +146,7 @@ public abstract class CompareFilter extends FilterBase { if (op == CompareOperator.NO_OP) { return true; } - int compareResult = CellComparator.compareRow(cell, comparator); + int compareResult = CellComparatorImpl.compareRow(cell, comparator); return compare(op, compareResult); } @@ -160,7 +160,7 @@ public abstract class CompareFilter extends FilterBase { if (compareOp == CompareOp.NO_OP) { return true; } - int compareResult = CellComparator.compareFamily(cell, comparator); + int compareResult = CellComparatorImpl.compareFamily(cell, comparator); return compare(compareOp, compareResult); } @@ -169,7 +169,7 @@ public abstract class CompareFilter extends FilterBase { if (op == CompareOperator.NO_OP) { return true; } - int compareResult = 
CellComparator.compareFamily(cell, comparator); + int compareResult = CellComparatorImpl.compareFamily(cell, comparator); return compare(op, compareResult); } @@ -184,7 +184,7 @@ public abstract class CompareFilter extends FilterBase { if (compareOp == CompareOp.NO_OP) { return true; } - int compareResult = CellComparator.compareQualifier(cell, comparator); + int compareResult = CellComparatorImpl.compareQualifier(cell, comparator); return compare(compareOp, compareResult); } @@ -194,7 +194,7 @@ public abstract class CompareFilter extends FilterBase { if (op == CompareOperator.NO_OP) { return true; } - int compareResult = CellComparator.compareQualifier(cell, comparator); + int compareResult = CellComparatorImpl.compareQualifier(cell, comparator); return compare(op, compareResult); } @@ -209,7 +209,7 @@ public abstract class CompareFilter extends FilterBase { if (compareOp == CompareOp.NO_OP) { return true; } - int compareResult = CellComparator.compareValue(cell, comparator); + int compareResult = CellComparatorImpl.compareValue(cell, comparator); return compare(compareOp, compareResult); } @@ -218,7 +218,7 @@ public abstract class CompareFilter extends FilterBase { if (op == CompareOperator.NO_OP) { return true; } - int compareResult = CellComparator.compareValue(cell, comparator); + int compareResult = CellComparatorImpl.compareValue(cell, comparator); return compare(op, compareResult); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index 033ca83..c326f67 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -27,7 +27,7 @@ import java.util.List; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -315,7 +315,7 @@ final public class FilterList extends FilterBase { case SEEK_NEXT_USING_HINT: Cell nextHintCell = getNextCellHint(prevCell); return nextHintCell == null - || CellComparator.COMPARATOR.compare(currentCell, nextHintCell) >= 0; + || CellComparatorImpl.COMPARATOR.compare(currentCell, nextHintCell) >= 0; case NEXT_COL: case INCLUDE_AND_NEXT_COL: return !CellUtil.matchingRowColumn(prevCell, currentCell); @@ -752,7 +752,7 @@ final public class FilterList extends FilterBase { keyHint = curKeyHint; continue; } - if (CellComparator.COMPARATOR.compare(keyHint, curKeyHint) < 0) { + if (CellComparatorImpl.COMPARATOR.compare(keyHint, curKeyHint) < 0) { keyHint = curKeyHint; } } @@ -774,7 +774,7 @@ final public class FilterList extends FilterBase { keyHint = curKeyHint; continue; } - if (CellComparator.COMPARATOR.compare(keyHint, curKeyHint) > 0) { + if (CellComparatorImpl.COMPARATOR.compare(keyHint, curKeyHint) > 0) { keyHint = curKeyHint; } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 24d4fab..244e8fb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.PriorityQueue; import org.apache.hadoop.hbase.Cell; 
-import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -235,7 +235,7 @@ public class FuzzyRowFilter extends FilterBase { boolean lessThan(Cell currentCell, byte[] nextRowKey) { int compareResult = - CellComparator.COMPARATOR.compareRows(currentCell, nextRowKey, 0, nextRowKey.length); + CellComparatorImpl.COMPARATOR.compareRows(currentCell, nextRowKey, 0, nextRowKey.length); return (!isReversed() && compareResult < 0) || (isReversed() && compareResult > 0); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 3c94c2c..c467d17 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; @@ -60,7 +60,7 @@ public class InclusiveStopFilter extends FilterBase { public boolean filterRowKey(Cell firstRowCell) { // if stopRowKey is <= buffer, then true, filter row. if (filterAllRemaining()) return true; - int cmp = CellComparator.COMPARATOR.compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length); + int cmp = CellComparatorImpl.COMPARATOR.compareRows(firstRowCell, stopRowKey, 0, stopRowKey.length); done = reversed ? 
cmp < 0 : cmp > 0; return done; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index a86b257..65e62c3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -268,7 +268,7 @@ public class SingleColumnValueFilter extends FilterBase { } private boolean filterColumnValue(final Cell cell) { - int compareResult = CellComparator.compareValue(cell, this.comparator); + int compareResult = CellComparatorImpl.compareValue(cell, this.comparator); return CompareFilter.compare(this.op, compareResult); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index 0925974..0f11156 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -32,7 +32,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.logging.Log; @@ -677,7 +677,7 @@ public class TestClientNoCluster extends Configured implements Tool { * Comparator for meta row keys. 
*/ private static class MetaRowsComparator implements Comparator { - private final CellComparator delegate = CellComparator.META_COMPARATOR; + private final CellComparatorImpl delegate = CellComparatorImpl.META_COMPARATOR; @Override public int compare(byte[] left, byte[] right) { return delegate.compareRows(new KeyValue.KeyOnlyKeyValue(left), right, 0, right.length); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index fa9c4ad..800de6d 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -388,7 +388,7 @@ public class TestOperation { Assert.assertEquals(1984L, c.get(0).getTimestamp()); Assert.assertArrayEquals(VALUE, CellUtil.cloneValue(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); - Assert.assertEquals(0, CellComparator.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ROW); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2013L, null); @@ -397,7 +397,7 @@ public class TestOperation { Assert.assertEquals(2013L, c.get(0).getTimestamp()); Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); - Assert.assertEquals(0, CellComparator.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ByteBuffer.wrap(ROW)); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2001L, null); @@ -407,7 +407,7 @@ public class TestOperation { Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimeStamp()); - Assert.assertEquals(0, CellComparator.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); p = new Put(ByteBuffer.wrap(ROW), 1970L); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2001L, null); @@ -417,7 +417,7 @@ public class TestOperation { Assert.assertArrayEquals(new byte[]{}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); Assert.assertEquals(1970L, p.getTimeStamp()); - Assert.assertEquals(0, CellComparator.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); } @Test diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestComparators.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestComparators.java index 0c69ece..43ff538 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestComparators.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestComparators.java @@ -24,7 +24,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferKeyValue; import 
org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -53,44 +53,44 @@ public class TestComparators { ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer()); Cell bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); ByteArrayComparable comparable = new BinaryComparator(r1); - assertEquals(0, CellComparator.compareRow(bbCell, comparable)); - assertEquals(0, CellComparator.compareRow(kv, comparable)); + assertEquals(0, CellComparatorImpl.compareRow(bbCell, comparable)); + assertEquals(0, CellComparatorImpl.compareRow(kv, comparable)); kv = new KeyValue(r0, f, q1, v1); buffer = ByteBuffer.wrap(kv.getBuffer()); bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertTrue(CellComparator.compareRow(bbCell, comparable) > 0); - assertTrue(CellComparator.compareRow(kv, comparable) > 0); + assertTrue(CellComparatorImpl.compareRow(bbCell, comparable) > 0); + assertTrue(CellComparatorImpl.compareRow(kv, comparable) > 0); kv = new KeyValue(r2, f, q1, v1); buffer = ByteBuffer.wrap(kv.getBuffer()); bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertTrue(CellComparator.compareRow(bbCell, comparable) < 0); - assertTrue(CellComparator.compareRow(kv, comparable) < 0); + assertTrue(CellComparatorImpl.compareRow(bbCell, comparable) < 0); + assertTrue(CellComparatorImpl.compareRow(kv, comparable) < 0); // Qualifier compare comparable = new BinaryPrefixComparator(Bytes.toBytes("qual")); - assertEquals(0, CellComparator.compareQualifier(bbCell, comparable)); - assertEquals(0, CellComparator.compareQualifier(kv, comparable)); + assertEquals(0, CellComparatorImpl.compareQualifier(bbCell, comparable)); + assertEquals(0, CellComparatorImpl.compareQualifier(kv, comparable)); kv = new KeyValue(r2, f, q2, v1); buffer = ByteBuffer.wrap(kv.getBuffer()); bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertEquals(0, CellComparator.compareQualifier(bbCell, comparable)); - assertEquals(0, CellComparator.compareQualifier(kv, comparable)); + assertEquals(0, CellComparatorImpl.compareQualifier(bbCell, comparable)); + assertEquals(0, CellComparatorImpl.compareQualifier(kv, comparable)); kv = new KeyValue(r2, f, q3, v1); buffer = ByteBuffer.wrap(kv.getBuffer()); bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertTrue(CellComparator.compareQualifier(bbCell, comparable) < 0); - assertTrue(CellComparator.compareQualifier(kv, comparable) < 0); + assertTrue(CellComparatorImpl.compareQualifier(bbCell, comparable) < 0); + assertTrue(CellComparatorImpl.compareQualifier(kv, comparable) < 0); // Value compare comparable = new LongComparator(l1); - assertEquals(0, CellComparator.compareValue(bbCell, comparable)); - assertEquals(0, CellComparator.compareValue(kv, comparable)); + assertEquals(0, CellComparatorImpl.compareValue(bbCell, comparable)); + assertEquals(0, CellComparatorImpl.compareValue(kv, comparable)); kv = new KeyValue(r1, f, q1, v2); buffer = ByteBuffer.wrap(kv.getBuffer()); bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertTrue(CellComparator.compareValue(bbCell, comparable) < 0); - assertTrue(CellComparator.compareValue(kv, comparable) < 0); + assertTrue(CellComparatorImpl.compareValue(bbCell, comparable) < 0); + assertTrue(CellComparatorImpl.compareValue(kv, comparable) < 0); // 
Family compare comparable = new SubstringComparator("cf"); - assertEquals(0, CellComparator.compareFamily(bbCell, comparable)); - assertEquals(0, CellComparator.compareFamily(kv, comparable)); + assertEquals(0, CellComparatorImpl.compareFamily(bbCell, comparable)); + assertEquals(0, CellComparatorImpl.compareFamily(kv, comparable)); } } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java index 469c3ea..91b02e6 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java @@ -26,7 +26,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Append; @@ -261,7 +261,7 @@ public class TestProtobufUtil { ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); Cell newOffheapKV = ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); - assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java index 567b10c..33713f2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java @@ -15,630 +15,88 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase; -import java.io.Serializable; import java.util.Comparator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.filter.ByteArrayComparable; -import org.apache.hadoop.hbase.util.ByteBufferUtils; -import org.apache.hadoop.hbase.util.Bytes; - -import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs; - /** - * Compare two HBase cells. Do not use this method comparing -ROOT- or - * hbase:meta cells. Cells from these tables need a specialized comparator, one that - * takes account of the special formatting of the row where we have commas to delimit table from - * regionname, from row. See KeyValue for how it has a special comparator to do hbase:meta cells - * and yet another for -ROOT-. - * While using this comparator for {{@link #compareRows(Cell, Cell)} et al, the hbase:meta cells - * format should be taken into consideration, for which the instance of this comparator - * should be used. 
In all other cases the static APIs in this comparator would be enough + * Used for comparing cells and has some specialized methods that allows comparing individual + * cell components like row, family, qualifier and timestamp */ -@edu.umd.cs.findbugs.annotations.SuppressWarnings( - value="UNKNOWN", - justification="Findbugs doesn't like the way we are negating the result of a compare in below") -@InterfaceAudience.Private +@InterfaceAudience.Public @InterfaceStability.Evolving -public class CellComparator implements Comparator, Serializable { - static final Log LOG = LogFactory.getLog(CellComparator.class); - private static final long serialVersionUID = -8760041766259623329L; - - /** - * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion - * of KeyValue only. - */ - public static final CellComparator COMPARATOR = new CellComparator(); - /** - * A {@link CellComparator} for hbase:meta catalog table - * {@link KeyValue}s. - */ - public static final CellComparator META_COMPARATOR = new MetaCellComparator(); - - @Override - public int compare(Cell a, Cell b) { - return compare(a, b, false); - } - - /** - * Compares only the key portion of a cell. It does not include the sequence id/mvcc of the - * cell - * @param left - * @param right - * @return an int greater than 0 if left > than right - * lesser than 0 if left < than right - * equal to 0 if left is equal to right - */ - public final int compareKeyIgnoresMvcc(Cell left, Cell right) { - return compare(left, right, true); - } - - /** - * Used when a cell needs to be compared with a key byte[] such as cases of - * finding the index from the index block, bloom keys from the bloom blocks - * This byte[] is expected to be serialized in the KeyValue serialization format - * If the KeyValue (Cell's) serialization format changes this method cannot be used. - * @param left the cell to be compared - * @param key the serialized key part of a KeyValue - * @param offset the offset in the key byte[] - * @param length the length of the key byte[] - * @return an int greater than 0 if left is greater than right - * lesser than 0 if left is lesser than right - * equal to 0 if left is equal to right - */ - public final int compare(Cell left, byte[] key, int offset, int length) { - // row - short rrowlength = Bytes.toShort(key, offset); - int c = compareRows(left, key, offset + Bytes.SIZEOF_SHORT, rrowlength); - if (c != 0) return c; - - // Compare the rest of the two KVs without making any assumptions about - // the common prefix. This function will not compare rows anyway, so we - // don't need to tell it that the common prefix includes the row. - return compareWithoutRow(left, key, offset, length, rrowlength); - } - - /** - * Compare cells. - * @param a - * @param b - * @param ignoreSequenceid True if we are to compare the key portion only and ignore - * the sequenceid. Set to false to compare key and consider sequenceid. - * @return 0 if equal, -1 if a < b, and +1 if a > b. 
- */ - private final int compare(final Cell a, final Cell b, boolean ignoreSequenceid) { - // row - int c = compareRows(a, b); - if (c != 0) return c; - - c = compareWithoutRow(a, b); - if(c != 0) return c; - - if (!ignoreSequenceid) { - // Negate following comparisons so later edits show up first - // mvccVersion: later sorts first - return Longs.compare(b.getSequenceId(), a.getSequenceId()); - } else { - return c; - } - } - - /** - * Compares the family and qualifier part of the cell - * @param left the left cell - * @param right the right cell - * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise - */ - public final static int compareColumns(final Cell left, final Cell right) { - int diff = compareFamilies(left, right); - if (diff != 0) { - return diff; - } - return compareQualifiers(left, right); - } - - private final static int compareColumns(Cell left, byte[] right, int rfoffset, int rflength, - int rqoffset, int rqlength) { - int diff = compareFamilies(left, right, rfoffset, rflength); - if (diff != 0) - return diff; - return compareQualifiers(left, right, rqoffset, rqlength); - } - - /** - * Compare the families of left and right cell - * @param left - * @param right - * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise - */ - public final static int compareFamilies(Cell left, Cell right) { - if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), - ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), - ((ByteBufferCell) right).getFamilyByteBuffer(), - ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength()); - } - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), - ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), - right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength()); - } - if (right instanceof ByteBufferCell) { - // Notice how we flip the order of the compare here. We used to negate the return value but - // see what FindBugs says - // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO - // It suggest flipping the order to get same effect and 'safer'. - return ByteBufferUtils.compareTo( - left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), - ((ByteBufferCell)right).getFamilyByteBuffer(), - ((ByteBufferCell)right).getFamilyPosition(), right.getFamilyLength()); - } - return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), - right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength()); - } - - private final static int compareFamilies(Cell left, byte[] right, int roffset, int rlength) { - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), - ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), right, - roffset, rlength); - } - return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), - right, roffset, rlength); - } - - /** - * Compare the qualifiers part of the left and right cells. 
- * @param left - * @param right - * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise - */ - public final static int compareQualifiers(Cell left, Cell right) { - if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { - return ByteBufferUtils - .compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), - ((ByteBufferCell) left).getQualifierPosition(), - left.getQualifierLength(), ((ByteBufferCell) right).getQualifierByteBuffer(), - ((ByteBufferCell) right).getQualifierPosition(), - right.getQualifierLength()); - } - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), - ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(), - right.getQualifierArray(), right.getQualifierOffset(), right.getQualifierLength()); - } - if (right instanceof ByteBufferCell) { - // Notice how we flip the order of the compare here. We used to negate the return value but - // see what FindBugs says - // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO - // It suggest flipping the order to get same effect and 'safer'. - return ByteBufferUtils.compareTo(left.getQualifierArray(), - left.getQualifierOffset(), left.getQualifierLength(), - ((ByteBufferCell)right).getQualifierByteBuffer(), - ((ByteBufferCell)right).getQualifierPosition(), right.getQualifierLength()); - } - return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), - left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(), - right.getQualifierLength()); - } - - public final static int compareQualifiers(Cell left, byte[] right, int rOffset, int rLength) { - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), - ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(), - right, rOffset, rLength); - } - return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), - left.getQualifierLength(), right, rOffset, rLength); - } - - /** - * Compare columnFamily, qualifier, timestamp, and key type (everything - * except the row). This method is used both in the normal comparator and - * the "same-prefix" comparator. Note that we are assuming that row portions - * of both KVs have already been parsed and found identical, and we don't - * validate that assumption here. - * @param commonPrefix - * the length of the common prefix of the two key-values being - * compared, including row length and row - */ - private final int compareWithoutRow(Cell left, - byte[] right, int roffset, int rlength, short rowlength) { - /*** - * KeyValue Format and commonLength: - * |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|.... - * ------------------|-------commonLength--------|-------------- - */ - int commonLength = KeyValue.ROW_LENGTH_SIZE + KeyValue.FAMILY_LENGTH_SIZE + rowlength; - - // commonLength + TIMESTAMP_TYPE_SIZE - int commonLengthWithTSAndType = KeyValue.TIMESTAMP_TYPE_SIZE + commonLength; - // ColumnFamily + Qualifier length. - int lcolumnlength = left.getFamilyLength() + left.getQualifierLength(); - int rcolumnlength = rlength - commonLengthWithTSAndType; - - byte ltype = left.getTypeByte(); - byte rtype = right[roffset + (rlength - 1)]; - - // If the column is not specified, the "minimum" key type appears the - // latest in the sorted order, regardless of the timestamp. 
This is used - // for specifying the last key/value in a given row, because there is no - // "lexicographically last column" (it would be infinitely long). The - // "maximum" key type does not need this behavior. - if (lcolumnlength == 0 && ltype == Type.Minimum.getCode()) { - // left is "bigger", i.e. it appears later in the sorted order - return 1; - } - if (rcolumnlength == 0 && rtype == Type.Minimum.getCode()) { - return -1; - } - - int rfamilyoffset = commonLength + roffset; - - // Column family length. - int lfamilylength = left.getFamilyLength(); - int rfamilylength = right[rfamilyoffset - 1]; - // If left family size is not equal to right family size, we need not - // compare the qualifiers. - boolean sameFamilySize = (lfamilylength == rfamilylength); - if (!sameFamilySize) { - // comparing column family is enough. - return compareFamilies(left, right, rfamilyoffset, rfamilylength); - } - // Compare family & qualifier together. - // Families are same. Compare on qualifiers. - int comparison = compareColumns(left, right, rfamilyoffset, rfamilylength, rfamilyoffset - + rfamilylength, (rcolumnlength - rfamilylength)); - if (comparison != 0) { - return comparison; - } - - // // - // Next compare timestamps. - long rtimestamp = Bytes.toLong(right, roffset + (rlength - KeyValue.TIMESTAMP_TYPE_SIZE)); - int compare = compareTimestamps(left.getTimestamp(), rtimestamp); - if (compare != 0) { - return compare; - } - - // Compare types. Let the delete types sort ahead of puts; i.e. types - // of higher numbers sort before those of lesser numbers. Maximum (255) - // appears ahead of everything, and minimum (0) appears after - // everything. - return (0xff & rtype) - (0xff & ltype); - } - - /** - * Compares the rows of the left and right cell. - * For the hbase:meta case this method is overridden such that it can handle hbase:meta cells. - * The caller should ensure using the appropriate comparator for hbase:meta. - * @param left - * @param right - * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise - */ - public int compareRows(final Cell left, final Cell right) { - // left and right can be exactly the same at the beginning of a row - if (left == right) { - return 0; - } - if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), - ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), - ((ByteBufferCell) right).getRowByteBuffer(), - ((ByteBufferCell) right).getRowPosition(), right.getRowLength()); - } - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), - ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), - right.getRowArray(), right.getRowOffset(), right.getRowLength()); - } - if (right instanceof ByteBufferCell) { - // Notice how we flip the order of the compare here. We used to negate the return value but - // see what FindBugs says - // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO - // It suggest flipping the order to get same effect and 'safer'. 
- return ByteBufferUtils.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), - ((ByteBufferCell)right).getRowByteBuffer(), - ((ByteBufferCell)right).getRowPosition(), right.getRowLength()); - } - return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), - right.getRowArray(), right.getRowOffset(), right.getRowLength()); - } +public interface CellComparator extends Comparator { /** - * Compares the row part of the cell with a simple plain byte[] like the - * stopRow in Scan. This should be used with context where for hbase:meta - * cells the {{@link #META_COMPARATOR} should be used - * - * @param left - * the cell to be compared - * @param right - * the kv serialized byte[] to be compared with - * @param roffset - * the offset in the byte[] - * @param rlength - * the length in the byte[] - * @return 0 if both cell and the byte[] are equal, 1 if the cell is bigger - * than byte[], -1 otherwise + * Lexographically compares two cells. The key part of the cell is taken for comparison which + * includes row, family, qualifier, timestamp and type + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both + * cells are equal */ - public int compareRows(Cell left, byte[] right, int roffset, int rlength) { - if (left instanceof ByteBufferCell) { - return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), - ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), right, - roffset, rlength); - } - return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right, - roffset, rlength); - } - - public static int compareWithoutRow(final Cell left, final Cell right) { - // If the column is not specified, the "minimum" key type appears the - // latest in the sorted order, regardless of the timestamp. This is used - // for specifying the last key/value in a given row, because there is no - // "lexicographically last column" (it would be infinitely long). The - // "maximum" key type does not need this behavior. - // Copied from KeyValue. This is bad in that we can't do memcmp w/ special rules like this. - int lFamLength = left.getFamilyLength(); - int rFamLength = right.getFamilyLength(); - int lQualLength = left.getQualifierLength(); - int rQualLength = right.getQualifierLength(); - if (lFamLength + lQualLength == 0 - && left.getTypeByte() == Type.Minimum.getCode()) { - // left is "bigger", i.e. it appears later in the sorted order - return 1; - } - if (rFamLength + rQualLength == 0 - && right.getTypeByte() == Type.Minimum.getCode()) { - return -1; - } - if (lFamLength != rFamLength) { - // comparing column family is enough. - return compareFamilies(left, right); - } - // Compare cf:qualifier - int diff = compareColumns(left, right); - if (diff != 0) return diff; - - diff = compareTimestamps(left, right); - if (diff != 0) return diff; - - // Compare types. Let the delete types sort ahead of puts; i.e. types - // of higher numbers sort before those of lesser numbers. Maximum (255) - // appears ahead of everything, and minimum (0) appears after - // everything. - return (0xff & right.getTypeByte()) - (0xff & left.getTypeByte()); - } + int compare(Cell leftCell, Cell rightCell); /** - * Compares cell's timestamps in DESCENDING order. - * The below older timestamps sorting ahead of newer timestamps looks - * wrong but it is intentional. 
This way, newer timestamps are first - * found when we iterate over a memstore and newer versions are the - * first we trip over when reading from a store file. - * @return 1 if left's timestamp < right's timestamp - * -1 if left's timestamp > right's timestamp - * 0 if both timestamps are equal + * Lexographically compares the rows of two cells. + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both + * cells are equal */ - public static int compareTimestamps(final Cell left, final Cell right) { - return compareTimestamps(left.getTimestamp(), right.getTimestamp()); - } + int compareRows(Cell leftCell, Cell rightCell); /** - * Used to compare two cells based on the column hint provided. This is specifically - * used when we need to optimize the seeks based on the next indexed key. This is an - * advanced usage API specifically needed for some optimizations. - * @param nextIndexedCell the next indexed cell - * @param currentCell the cell to be compared - * @param foff the family offset of the currentCell - * @param flen the family length of the currentCell - * @param colHint the column hint provided - could be null - * @param coff the offset of the column hint if provided, if not offset of the currentCell's - * qualifier - * @param clen the length of the column hint if provided, if not length of the currentCell's - * qualifier - * @param ts the timestamp to be seeked - * @param type the type to be seeked - * @return an int based on the given column hint - * TODO : To be moved out of here because this is a special API used in scan - * optimization. + * Lexographically compares the two cells excluding the row part. It compares family, qualifier, + * timestamp and the type + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both + * cells are equal */ - // compare a key against row/fam/qual/ts/type - public final int compareKeyBasedOnColHint(Cell nextIndexedCell, Cell currentCell, int foff, - int flen, byte[] colHint, int coff, int clen, long ts, byte type) { - int compare = compareRows(nextIndexedCell, currentCell); - if (compare != 0) { - return compare; - } - // If the column is not specified, the "minimum" key type appears the - // latest in the sorted order, regardless of the timestamp. This is used - // for specifying the last key/value in a given row, because there is no - // "lexicographically last column" (it would be infinitely long). The - // "maximum" key type does not need this behavior. - if (nextIndexedCell.getFamilyLength() + nextIndexedCell.getQualifierLength() == 0 - && nextIndexedCell.getTypeByte() == Type.Minimum.getCode()) { - // left is "bigger", i.e. it appears later in the sorted order - return 1; - } - if (flen + clen == 0 && type == Type.Minimum.getCode()) { - return -1; - } - - compare = compareFamilies(nextIndexedCell, currentCell); - if (compare != 0) { - return compare; - } - if (colHint == null) { - compare = compareQualifiers(nextIndexedCell, currentCell); - } else { - compare = compareQualifiers(nextIndexedCell, colHint, coff, clen); - } - if (compare != 0) { - return compare; - } - // Next compare timestamps. - compare = compareTimestamps(nextIndexedCell.getTimestamp(), ts); - if (compare != 0) { - return compare; - } - - // Compare types. Let the delete types sort ahead of puts; i.e. 
types - // of higher numbers sort before those of lesser numbers. Maximum (255) - // appears ahead of everything, and minimum (0) appears after - // everything. - return (0xff & type) - (0xff & nextIndexedCell.getTypeByte()); - } + int compareWithoutRow(Cell leftCell, Cell rightCell); /** - * Compares timestamps in DESCENDING order. - * The below older timestamps sorting ahead of newer timestamps looks - * wrong but it is intentional. This way, newer timestamps are first - * found when we iterate over a memstore and newer versions are the - * first we trip over when reading from a store file. - * @return 1 if left timestamp < right timestamp - * -1 if left timestamp > right timestamp - * 0 if both timestamps are equal + * Lexographically compares the families of the two cells + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both + * cells are equal */ - public static int compareTimestamps(final long ltimestamp, final long rtimestamp) { - if (ltimestamp < rtimestamp) { - return 1; - } else if (ltimestamp > rtimestamp) { - return -1; - } - return 0; - } + int compareFamilies(Cell leftCell, Cell rightCell); /** - * Compare cell's row against given comparator - * @param cell - * @param comparator - * @return result comparing cell's row + * Lexographically compares the qualifiers of the two cells + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return greater than 0 if leftCell is bigger, less than 0 if rightCell is bigger, 0 if both + * cells are equal */ - public static int compareRow(Cell cell, ByteArrayComparable comparator) { - if (cell instanceof ByteBufferCell) { - return comparator.compareTo(((ByteBufferCell) cell).getRowByteBuffer(), - ((ByteBufferCell) cell).getRowPosition(), cell.getRowLength()); - } - return comparator.compareTo(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); - } + int compareQualifiers(Cell leftCell, Cell rightCell); /** - * Compare cell's column family against given comparator - * @param cell - * @param comparator - * @return result comparing cell's column family + * Compares cell's timestamps in DESCENDING order. The below older timestamps sorting ahead of + * newer timestamps looks wrong but it is intentional. This way, newer timestamps are first found + * when we iterate over a memstore and newer versions are the first we trip over when reading from + * a store file. + * @param leftCell the left hand side cell + * @param rightCell the right hand side cell + * @return 1 if left's timestamp < right's timestamp -1 if left's timestamp > right's + * timestamp 0 if both timestamps are equal */ - public static int compareFamily(Cell cell, ByteArrayComparable comparator) { - if (cell instanceof ByteBufferCell) { - return comparator.compareTo(((ByteBufferCell) cell).getFamilyByteBuffer(), - ((ByteBufferCell) cell).getFamilyPosition(), cell.getFamilyLength()); - } - return comparator.compareTo(cell.getFamilyArray(), cell.getFamilyOffset(), - cell.getFamilyLength()); - } + int compareTimestamps(Cell leftCell, Cell rightCell); /** - * Compare cell's qualifier against given comparator - * @param cell - * @param comparator - * @return result comparing cell's qualifier + * Compares cell's timestamps in DESCENDING order. The below older timestamps sorting ahead of + * newer timestamps looks wrong but it is intentional. 
This way, newer timestamps are first found + * when we iterate over a memstore and newer versions are the first we trip over when reading from + * a store file. + * @param leftCellts the left cell's timestamp + * @param rightCellts the right cell's timestamp + * @return 1 if left's timestamp < right's timestamp -1 if left's timestamp > right's + * timestamp 0 if both timestamps are equal */ - public static int compareQualifier(Cell cell, ByteArrayComparable comparator) { - if (cell instanceof ByteBufferCell) { - return comparator.compareTo(((ByteBufferCell) cell).getQualifierByteBuffer(), - ((ByteBufferCell) cell).getQualifierPosition(), cell.getQualifierLength()); - } - return comparator.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), - cell.getQualifierLength()); - } - - /** - * Compare cell's value against given comparator - * @param cell - * @param comparator - * @return result comparing cell's value - */ - public static int compareValue(Cell cell, ByteArrayComparable comparator) { - if (cell instanceof ByteBufferCell) { - return comparator.compareTo(((ByteBufferCell) cell).getValueByteBuffer(), - ((ByteBufferCell) cell).getValuePosition(), cell.getValueLength()); - } - return comparator.compareTo(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); - } - - /** - * A {@link CellComparator} for hbase:meta catalog table - * {@link KeyValue}s. - */ - public static class MetaCellComparator extends CellComparator { - - @Override - public int compareRows(final Cell left, final Cell right) { - return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(), - right.getRowArray(), right.getRowOffset(), right.getRowLength()); - } - - @Override - public int compareRows(Cell left, byte[] right, int roffset, int rlength) { - return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right, - roffset, rlength); - } - - private int compareRows(byte[] left, int loffset, int llength, byte[] right, int roffset, - int rlength) { - int leftDelimiter = Bytes.searchDelimiterIndex(left, loffset, llength, HConstants.DELIMITER); - int rightDelimiter = Bytes - .searchDelimiterIndex(right, roffset, rlength, HConstants.DELIMITER); - // Compare up to the delimiter - int lpart = (leftDelimiter < 0 ? llength : leftDelimiter - loffset); - int rpart = (rightDelimiter < 0 ? rlength : rightDelimiter - roffset); - int result = Bytes.compareTo(left, loffset, lpart, right, roffset, rpart); - if (result != 0) { - return result; - } else { - if (leftDelimiter < 0 && rightDelimiter >= 0) { - return -1; - } else if (rightDelimiter < 0 && leftDelimiter >= 0) { - return 1; - } else if (leftDelimiter < 0 && rightDelimiter < 0) { - return 0; - } - } - // Compare middle bit of the row. - // Move past delimiter - leftDelimiter++; - rightDelimiter++; - int leftFarDelimiter = Bytes.searchDelimiterIndexInReverse(left, leftDelimiter, llength - - (leftDelimiter - loffset), HConstants.DELIMITER); - int rightFarDelimiter = Bytes.searchDelimiterIndexInReverse(right, rightDelimiter, rlength - - (rightDelimiter - roffset), HConstants.DELIMITER); - // Now compare middlesection of row. - lpart = (leftFarDelimiter < 0 ? llength + loffset : leftFarDelimiter) - leftDelimiter; - rpart = (rightFarDelimiter < 0 ? 
rlength + roffset : rightFarDelimiter) - rightDelimiter; - result = Bytes.compareTo(left, leftDelimiter, lpart, right, rightDelimiter, rpart); - if (result != 0) { - return result; - } else { - if (leftDelimiter < 0 && rightDelimiter >= 0) { - return -1; - } else if (rightDelimiter < 0 && leftDelimiter >= 0) { - return 1; - } else if (leftDelimiter < 0 && rightDelimiter < 0) { - return 0; - } - } - // Compare last part of row, the rowid. - leftFarDelimiter++; - rightFarDelimiter++; - result = Bytes.compareTo(left, leftFarDelimiter, llength - (leftFarDelimiter - loffset), - right, rightFarDelimiter, rlength - (rightFarDelimiter - roffset)); - return result; - } - } + int compareTimestamps(long leftCellts, long rightCellts); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java new file mode 100644 index 0000000..466cc8a --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -0,0 +1,647 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; +import org.apache.hadoop.hbase.filter.ByteArrayComparable; +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; + +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs; + +/** + * Compare two HBase cells. Do not use this method comparing -ROOT- or + * hbase:meta cells. Cells from these tables need a specialized comparator, one that + * takes account of the special formatting of the row where we have commas to delimit table from + * regionname, from row. See KeyValue for how it has a special comparator to do hbase:meta cells + * and yet another for -ROOT-. + * While using this comparator for {{@link #compareRows(Cell, Cell)} et al, the hbase:meta cells + * format should be taken into consideration, for which the instance of this comparator + * should be used. In all other cases the static APIs in this comparator would be enough + */ +@edu.umd.cs.findbugs.annotations.SuppressWarnings( + value="UNKNOWN", + justification="Findbugs doesn't like the way we are negating the result of a compare in below") +@InterfaceAudience.Private +@InterfaceStability.Evolving +public class CellComparatorImpl implements CellComparator { + static final Log LOG = LogFactory.getLog(CellComparatorImpl.class); + private static final long serialVersionUID = -8760041766259623329L; + + /** + * Comparator for plain key/values; i.e. 
non-catalog table key/values. Works on Key portion + * of KeyValue only. + */ + public static final CellComparatorImpl COMPARATOR = new CellComparatorImpl(); + /** + * A {@link CellComparatorImpl} for hbase:meta catalog table + * {@link KeyValue}s. + */ + public static final CellComparatorImpl META_COMPARATOR = new MetaCellComparator(); + + @Override + public int compare(Cell a, Cell b) { + return compare(a, b, false); + } + + /** + * Compares only the key portion of a cell. It does not include the sequence id/mvcc of the + * cell + * @param left + * @param right + * @return an int greater than 0 if left > than right + * lesser than 0 if left < than right + * equal to 0 if left is equal to right + */ + public final int compareKeyIgnoresMvcc(Cell left, Cell right) { + return compare(left, right, true); + } + + /** + * Used when a cell needs to be compared with a key byte[] such as cases of + * finding the index from the index block, bloom keys from the bloom blocks + * This byte[] is expected to be serialized in the KeyValue serialization format + * If the KeyValue (Cell's) serialization format changes this method cannot be used. + * @param left the cell to be compared + * @param key the serialized key part of a KeyValue + * @param offset the offset in the key byte[] + * @param length the length of the key byte[] + * @return an int greater than 0 if left is greater than right + * lesser than 0 if left is lesser than right + * equal to 0 if left is equal to right + */ + public final int compare(Cell left, byte[] key, int offset, int length) { + // row + short rrowlength = Bytes.toShort(key, offset); + int c = compareRows(left, key, offset + Bytes.SIZEOF_SHORT, rrowlength); + if (c != 0) return c; + + // Compare the rest of the two KVs without making any assumptions about + // the common prefix. This function will not compare rows anyway, so we + // don't need to tell it that the common prefix includes the row. + return compareWithoutRow(left, key, offset, length, rrowlength); + } + + /** + * Compare cells. + * @param a + * @param b + * @param ignoreSequenceid True if we are to compare the key portion only and ignore + * the sequenceid. Set to false to compare key and consider sequenceid. + * @return 0 if equal, -1 if a < b, and +1 if a > b. 
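Because only the key portion (row, family, qualifier, timestamp, type) and the sequence id participate in the comparison above, two cells that differ only in their value are reported as equal. A small sketch, assuming the KeyValue constructors exercised in the tests later in this patch (names and literals are illustrative):

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyOnlyCompareSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] fam = Bytes.toBytes("f");
    byte[] qual = Bytes.toBytes("q");
    KeyValue a = new KeyValue(row, fam, qual, 1L, Bytes.toBytes("value-a"));
    KeyValue b = new KeyValue(row, fam, qual, 1L, Bytes.toBytes("value-b"));
    // Same key coordinates, different values: both comparisons report equality.
    System.out.println(CellComparatorImpl.COMPARATOR.compare(a, b));               // 0
    System.out.println(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(a, b)); // 0
  }
}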
+ */ + private final int compare(final Cell a, final Cell b, boolean ignoreSequenceid) { + // row + int c = compareRows(a, b); + if (c != 0) return c; + + c = compareWithoutRow(a, b); + if(c != 0) return c; + + if (!ignoreSequenceid) { + // Negate following comparisons so later edits show up first + // mvccVersion: later sorts first + return Longs.compare(b.getSequenceId(), a.getSequenceId()); + } else { + return c; + } + } + + /** + * Compares the family and qualifier part of the cell + * @param left the left cell + * @param right the right cell + * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise + */ + public final int compareColumns(final Cell left, final Cell right) { + int diff = compareFamilies(left, right); + if (diff != 0) { + return diff; + } + return compareQualifiers(left, right); + } + + private final static int compareColumns(Cell left, byte[] right, int rfoffset, int rflength, + int rqoffset, int rqlength) { + int diff = compareFamilies(left, right, rfoffset, rflength); + if (diff != 0) + return diff; + return compareQualifiers(left, right, rqoffset, rqlength); + } + + /** + * Compare the families of left and right cell + * @param left + * @param right + * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise + */ + @Override + public final int compareFamilies(Cell left, Cell right) { + if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), + ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), + ((ByteBufferCell) right).getFamilyByteBuffer(), + ((ByteBufferCell) right).getFamilyPosition(), right.getFamilyLength()); + } + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), + ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), + right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength()); + } + if (right instanceof ByteBufferCell) { + // Notice how we flip the order of the compare here. We used to negate the return value but + // see what FindBugs says + // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO + // It suggest flipping the order to get same effect and 'safer'. + return ByteBufferUtils.compareTo( + left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), + ((ByteBufferCell)right).getFamilyByteBuffer(), + ((ByteBufferCell)right).getFamilyPosition(), right.getFamilyLength()); + } + return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), + right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength()); + } + + private final static int compareFamilies(Cell left, byte[] right, int roffset, int rlength) { + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getFamilyByteBuffer(), + ((ByteBufferCell) left).getFamilyPosition(), left.getFamilyLength(), right, + roffset, rlength); + } + return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), + right, roffset, rlength); + } + + /** + * Compare the qualifiers part of the left and right cells. 
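compareColumns above resolves the family first and only falls back to the qualifier when the families match. A quick check with hypothetical column names:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareColumnsSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] val = Bytes.toBytes("v");
    KeyValue qualA = new KeyValue(row, Bytes.toBytes("f1"), Bytes.toBytes("qA"), 1L, val);
    KeyValue qualB = new KeyValue(row, Bytes.toBytes("f1"), Bytes.toBytes("qB"), 1L, val);
    KeyValue fam2  = new KeyValue(row, Bytes.toBytes("f2"), Bytes.toBytes("qA"), 1L, val);
    // Families equal, so the qualifier decides: "qA" < "qB".
    System.out.println(CellComparatorImpl.COMPARATOR.compareColumns(qualA, qualB) < 0); // true
    // Families differ ("f1" < "f2"), so the qualifier is never consulted.
    System.out.println(CellComparatorImpl.COMPARATOR.compareColumns(fam2, qualA) > 0);  // true
  }
}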
+ * @param left + * @param right + * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise + */ + @Override + public final int compareQualifiers(Cell left, Cell right) { + if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { + return ByteBufferUtils + .compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), + ((ByteBufferCell) left).getQualifierPosition(), + left.getQualifierLength(), ((ByteBufferCell) right).getQualifierByteBuffer(), + ((ByteBufferCell) right).getQualifierPosition(), + right.getQualifierLength()); + } + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), + ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(), + right.getQualifierArray(), right.getQualifierOffset(), right.getQualifierLength()); + } + if (right instanceof ByteBufferCell) { + // Notice how we flip the order of the compare here. We used to negate the return value but + // see what FindBugs says + // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO + // It suggest flipping the order to get same effect and 'safer'. + return ByteBufferUtils.compareTo(left.getQualifierArray(), + left.getQualifierOffset(), left.getQualifierLength(), + ((ByteBufferCell)right).getQualifierByteBuffer(), + ((ByteBufferCell)right).getQualifierPosition(), right.getQualifierLength()); + } + return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), + left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(), + right.getQualifierLength()); + } + + public final static int compareQualifiers(Cell left, byte[] right, int rOffset, int rLength) { + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getQualifierByteBuffer(), + ((ByteBufferCell) left).getQualifierPosition(), left.getQualifierLength(), + right, rOffset, rLength); + } + return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), + left.getQualifierLength(), right, rOffset, rLength); + } + + /** + * Compare columnFamily, qualifier, timestamp, and key type (everything + * except the row). This method is used both in the normal comparator and + * the "same-prefix" comparator. Note that we are assuming that row portions + * of both KVs have already been parsed and found identical, and we don't + * validate that assumption here. + * @param commonPrefix + * the length of the common prefix of the two key-values being + * compared, including row length and row + */ + private final int compareWithoutRow(Cell left, + byte[] right, int roffset, int rlength, short rowlength) { + /*** + * KeyValue Format and commonLength: + * |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|.... + * ------------------|-------commonLength--------|-------------- + */ + int commonLength = KeyValue.ROW_LENGTH_SIZE + KeyValue.FAMILY_LENGTH_SIZE + rowlength; + + // commonLength + TIMESTAMP_TYPE_SIZE + int commonLengthWithTSAndType = KeyValue.TIMESTAMP_TYPE_SIZE + commonLength; + // ColumnFamily + Qualifier length. + int lcolumnlength = left.getFamilyLength() + left.getQualifierLength(); + int rcolumnlength = rlength - commonLengthWithTSAndType; + + byte ltype = left.getTypeByte(); + byte rtype = right[roffset + (rlength - 1)]; + + // If the column is not specified, the "minimum" key type appears the + // latest in the sorted order, regardless of the timestamp. 
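The static compareQualifiers(Cell, byte[], offset, length) overload above is handy for matching a cell against a plain qualifier array such as a column constant. A minimal sketch; the qualifier name and class name are made up for illustration:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierMatchSketch {
  // Illustrative column constant, not taken from this patch.
  private static final byte[] MY_COL = Bytes.toBytes("myColumn");

  public static void main(String[] args) {
    KeyValue cell = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        MY_COL, 1L, Bytes.toBytes("some value"));
    boolean isMyColumn =
        CellComparatorImpl.compareQualifiers(cell, MY_COL, 0, MY_COL.length) == 0;
    System.out.println(isMyColumn); // true
  }
}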
This is used + // for specifying the last key/value in a given row, because there is no + // "lexicographically last column" (it would be infinitely long). The + // "maximum" key type does not need this behavior. + if (lcolumnlength == 0 && ltype == Type.Minimum.getCode()) { + // left is "bigger", i.e. it appears later in the sorted order + return 1; + } + if (rcolumnlength == 0 && rtype == Type.Minimum.getCode()) { + return -1; + } + + int rfamilyoffset = commonLength + roffset; + + // Column family length. + int lfamilylength = left.getFamilyLength(); + int rfamilylength = right[rfamilyoffset - 1]; + // If left family size is not equal to right family size, we need not + // compare the qualifiers. + boolean sameFamilySize = (lfamilylength == rfamilylength); + if (!sameFamilySize) { + // comparing column family is enough. + return compareFamilies(left, right, rfamilyoffset, rfamilylength); + } + // Compare family & qualifier together. + // Families are same. Compare on qualifiers. + int comparison = compareColumns(left, right, rfamilyoffset, rfamilylength, rfamilyoffset + + rfamilylength, (rcolumnlength - rfamilylength)); + if (comparison != 0) { + return comparison; + } + + // // + // Next compare timestamps. + long rtimestamp = Bytes.toLong(right, roffset + (rlength - KeyValue.TIMESTAMP_TYPE_SIZE)); + int compare = compareTimestamps(left.getTimestamp(), rtimestamp); + if (compare != 0) { + return compare; + } + + // Compare types. Let the delete types sort ahead of puts; i.e. types + // of higher numbers sort before those of lesser numbers. Maximum (255) + // appears ahead of everything, and minimum (0) appears after + // everything. + return (0xff & rtype) - (0xff & ltype); + } + + /** + * Compares the rows of the left and right cell. + * For the hbase:meta case this method is overridden such that it can handle hbase:meta cells. + * The caller should ensure using the appropriate comparator for hbase:meta. + * @param left + * @param right + * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise + */ + @Override + public int compareRows(final Cell left, final Cell right) { + // left and right can be exactly the same at the beginning of a row + if (left == right) { + return 0; + } + if (left instanceof ByteBufferCell && right instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), + ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), + ((ByteBufferCell) right).getRowByteBuffer(), + ((ByteBufferCell) right).getRowPosition(), right.getRowLength()); + } + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), + ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), + right.getRowArray(), right.getRowOffset(), right.getRowLength()); + } + if (right instanceof ByteBufferCell) { + // Notice how we flip the order of the compare here. We used to negate the return value but + // see what FindBugs says + // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO + // It suggest flipping the order to get same effect and 'safer'. 
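The type-byte rule described above, with delete types sorting ahead of puts at the same coordinates, can be seen directly; the row, family and qualifier literals here are illustrative:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteBeforePutSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] fam = Bytes.toBytes("f");
    byte[] qual = Bytes.toBytes("q");
    byte[] val = Bytes.toBytes("v");
    KeyValue delete = new KeyValue(row, fam, qual, 1L, KeyValue.Type.Delete, val);
    KeyValue put = new KeyValue(row, fam, qual, 1L, val); // defaults to a Put
    // Same row, column and timestamp: the Delete sorts first.
    System.out.println(CellComparatorImpl.COMPARATOR.compare(delete, put) < 0); // true
  }
}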
+ return ByteBufferUtils.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), + ((ByteBufferCell)right).getRowByteBuffer(), + ((ByteBufferCell)right).getRowPosition(), right.getRowLength()); + } + return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), + right.getRowArray(), right.getRowOffset(), right.getRowLength()); + } + + /** + * Compares the row part of the cell with a simple plain byte[] like the + * stopRow in Scan. This should be used with context where for hbase:meta + * cells the {{@link #META_COMPARATOR} should be used + * + * @param left + * the cell to be compared + * @param right + * the kv serialized byte[] to be compared with + * @param roffset + * the offset in the byte[] + * @param rlength + * the length in the byte[] + * @return 0 if both cell and the byte[] are equal, 1 if the cell is bigger + * than byte[], -1 otherwise + */ + public int compareRows(Cell left, byte[] right, int roffset, int rlength) { + if (left instanceof ByteBufferCell) { + return ByteBufferUtils.compareTo(((ByteBufferCell) left).getRowByteBuffer(), + ((ByteBufferCell) left).getRowPosition(), left.getRowLength(), right, + roffset, rlength); + } + return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right, + roffset, rlength); + } + + @Override + public final int compareWithoutRow(final Cell left, final Cell right) { + // If the column is not specified, the "minimum" key type appears the + // latest in the sorted order, regardless of the timestamp. This is used + // for specifying the last key/value in a given row, because there is no + // "lexicographically last column" (it would be infinitely long). The + // "maximum" key type does not need this behavior. + // Copied from KeyValue. This is bad in that we can't do memcmp w/ special rules like this. + int lFamLength = left.getFamilyLength(); + int rFamLength = right.getFamilyLength(); + int lQualLength = left.getQualifierLength(); + int rQualLength = right.getQualifierLength(); + if (lFamLength + lQualLength == 0 + && left.getTypeByte() == Type.Minimum.getCode()) { + // left is "bigger", i.e. it appears later in the sorted order + return 1; + } + if (rFamLength + rQualLength == 0 + && right.getTypeByte() == Type.Minimum.getCode()) { + return -1; + } + if (lFamLength != rFamLength) { + // comparing column family is enough. + return compareFamilies(left, right); + } + // Compare cf:qualifier + int diff = compareColumns(left, right); + if (diff != 0) return diff; + + diff = compareTimestamps(left, right); + if (diff != 0) return diff; + + // Compare types. Let the delete types sort ahead of puts; i.e. types + // of higher numbers sort before those of lesser numbers. Maximum (255) + // appears ahead of everything, and minimum (0) appears after + // everything. + return (0xff & right.getTypeByte()) - (0xff & left.getTypeByte()); + } + + /** + * Compares cell's timestamps in DESCENDING order. + * The below older timestamps sorting ahead of newer timestamps looks + * wrong but it is intentional. This way, newer timestamps are first + * found when we iterate over a memstore and newer versions are the + * first we trip over when reading from a store file. 
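The compareRows(Cell, byte[], offset, length) overload above is the one used to test a cell against a raw row key such as a Scan stop row. A quick sketch with hypothetical rows:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class StopRowSketch {
  public static void main(String[] args) {
    byte[] stopRow = Bytes.toBytes("row5");
    KeyValue cell = new KeyValue(Bytes.toBytes("row3"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v"));
    // A negative result means the cell's row is still before the stop row.
    boolean beforeStop =
        CellComparatorImpl.COMPARATOR.compareRows(cell, stopRow, 0, stopRow.length) < 0;
    System.out.println(beforeStop); // true
  }
}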
+ * @return 1 if left's timestamp < right's timestamp + * -1 if left's timestamp > right's timestamp + * 0 if both timestamps are equal + */ + @Override + public int compareTimestamps(final Cell left, final Cell right) { + return compareTimestamps(left.getTimestamp(), right.getTimestamp()); + } + + /** + * Used to compare two cells based on the column hint provided. This is specifically + * used when we need to optimize the seeks based on the next indexed key. This is an + * advanced usage API specifically needed for some optimizations. + * @param nextIndexedCell the next indexed cell + * @param currentCell the cell to be compared + * @param foff the family offset of the currentCell + * @param flen the family length of the currentCell + * @param colHint the column hint provided - could be null + * @param coff the offset of the column hint if provided, if not offset of the currentCell's + * qualifier + * @param clen the length of the column hint if provided, if not length of the currentCell's + * qualifier + * @param ts the timestamp to be seeked + * @param type the type to be seeked + * @return an int based on the given column hint + * TODO : To be moved out of here because this is a special API used in scan + * optimization. + */ + // compare a key against row/fam/qual/ts/type + public final int compareKeyBasedOnColHint(Cell nextIndexedCell, Cell currentCell, int foff, + int flen, byte[] colHint, int coff, int clen, long ts, byte type) { + int compare = compareRows(nextIndexedCell, currentCell); + if (compare != 0) { + return compare; + } + // If the column is not specified, the "minimum" key type appears the + // latest in the sorted order, regardless of the timestamp. This is used + // for specifying the last key/value in a given row, because there is no + // "lexicographically last column" (it would be infinitely long). The + // "maximum" key type does not need this behavior. + if (nextIndexedCell.getFamilyLength() + nextIndexedCell.getQualifierLength() == 0 + && nextIndexedCell.getTypeByte() == Type.Minimum.getCode()) { + // left is "bigger", i.e. it appears later in the sorted order + return 1; + } + if (flen + clen == 0 && type == Type.Minimum.getCode()) { + return -1; + } + + compare = compareFamilies(nextIndexedCell, currentCell); + if (compare != 0) { + return compare; + } + if (colHint == null) { + compare = compareQualifiers(nextIndexedCell, currentCell); + } else { + compare = compareQualifiers(nextIndexedCell, colHint, coff, clen); + } + if (compare != 0) { + return compare; + } + // Next compare timestamps. + compare = compareTimestamps(nextIndexedCell.getTimestamp(), ts); + if (compare != 0) { + return compare; + } + + // Compare types. Let the delete types sort ahead of puts; i.e. types + // of higher numbers sort before those of lesser numbers. Maximum (255) + // appears ahead of everything, and minimum (0) appears after + // everything. + return (0xff & type) - (0xff & nextIndexedCell.getTypeByte()); + } + + /** + * Compares timestamps in DESCENDING order. + * The below older timestamps sorting ahead of newer timestamps looks + * wrong but it is intentional. This way, newer timestamps are first + * found when we iterate over a memstore and newer versions are the + * first we trip over when reading from a store file. 
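compareTimestamps(Cell, Cell) above gives full cells the same newest-first ordering: with identical coordinates, the cell carrying the larger timestamp sorts ahead. A sketch with hypothetical data:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class NewestFirstSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] fam = Bytes.toBytes("f");
    byte[] qual = Bytes.toBytes("q");
    KeyValue older = new KeyValue(row, fam, qual, 1L, Bytes.toBytes("v1"));
    KeyValue newer = new KeyValue(row, fam, qual, 2L, Bytes.toBytes("v2"));
    // The older cell compares as "greater", so it sorts after the newer one.
    System.out.println(CellComparatorImpl.COMPARATOR.compareTimestamps(older, newer)); // 1
    // In full-cell ordering the newer version therefore comes first.
    System.out.println(CellComparatorImpl.COMPARATOR.compare(newer, older) < 0);       // true
  }
}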
+ * @return 1 if left timestamp < right timestamp + * -1 if left timestamp > right timestamp + * 0 if both timestamps are equal + */ + @Override + public int compareTimestamps(final long ltimestamp, final long rtimestamp) { + if (ltimestamp < rtimestamp) { + return 1; + } else if (ltimestamp > rtimestamp) { + return -1; + } + return 0; + } + + /** + * Compare cell's row against given comparator + * @param cell + * @param comparator + * @return result comparing cell's row + */ + public static int compareRow(Cell cell, ByteArrayComparable comparator) { + if (cell instanceof ByteBufferCell) { + return comparator.compareTo(((ByteBufferCell) cell).getRowByteBuffer(), + ((ByteBufferCell) cell).getRowPosition(), cell.getRowLength()); + } + return comparator.compareTo(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); + } + + /** + * Compare cell's column family against given comparator + * @param cell + * @param comparator + * @return result comparing cell's column family + */ + public static int compareFamily(Cell cell, ByteArrayComparable comparator) { + if (cell instanceof ByteBufferCell) { + return comparator.compareTo(((ByteBufferCell) cell).getFamilyByteBuffer(), + ((ByteBufferCell) cell).getFamilyPosition(), cell.getFamilyLength()); + } + return comparator.compareTo(cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength()); + } + + /** + * Compare cell's qualifier against given comparator + * @param cell + * @param comparator + * @return result comparing cell's qualifier + */ + public static int compareQualifier(Cell cell, ByteArrayComparable comparator) { + if (cell instanceof ByteBufferCell) { + return comparator.compareTo(((ByteBufferCell) cell).getQualifierByteBuffer(), + ((ByteBufferCell) cell).getQualifierPosition(), cell.getQualifierLength()); + } + return comparator.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength()); + } + + /** + * Compare cell's value against given comparator + * @param cell + * @param comparator + * @return result comparing cell's value + */ + public static int compareValue(Cell cell, ByteArrayComparable comparator) { + if (cell instanceof ByteBufferCell) { + return comparator.compareTo(((ByteBufferCell) cell).getValueByteBuffer(), + ((ByteBufferCell) cell).getValuePosition(), cell.getValueLength()); + } + return comparator.compareTo(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); + } + + /** + * A {@link CellComparatorImpl} for hbase:meta catalog table + * {@link KeyValue}s. + */ + public static class MetaCellComparator extends CellComparatorImpl { + + @Override + public int compareRows(final Cell left, final Cell right) { + return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(), + right.getRowArray(), right.getRowOffset(), right.getRowLength()); + } + + @Override + public int compareRows(Cell left, byte[] right, int roffset, int rlength) { + return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right, + roffset, rlength); + } + + private int compareRows(byte[] left, int loffset, int llength, byte[] right, int roffset, + int rlength) { + int leftDelimiter = Bytes.searchDelimiterIndex(left, loffset, llength, HConstants.DELIMITER); + int rightDelimiter = Bytes + .searchDelimiterIndex(right, roffset, rlength, HConstants.DELIMITER); + // Compare up to the delimiter + int lpart = (leftDelimiter < 0 ? llength : leftDelimiter - loffset); + int rpart = (rightDelimiter < 0 ? 
rlength : rightDelimiter - roffset); + int result = Bytes.compareTo(left, loffset, lpart, right, roffset, rpart); + if (result != 0) { + return result; + } else { + if (leftDelimiter < 0 && rightDelimiter >= 0) { + return -1; + } else if (rightDelimiter < 0 && leftDelimiter >= 0) { + return 1; + } else if (leftDelimiter < 0 && rightDelimiter < 0) { + return 0; + } + } + // Compare middle bit of the row. + // Move past delimiter + leftDelimiter++; + rightDelimiter++; + int leftFarDelimiter = Bytes.searchDelimiterIndexInReverse(left, leftDelimiter, llength + - (leftDelimiter - loffset), HConstants.DELIMITER); + int rightFarDelimiter = Bytes.searchDelimiterIndexInReverse(right, rightDelimiter, rlength + - (rightDelimiter - roffset), HConstants.DELIMITER); + // Now compare middlesection of row. + lpart = (leftFarDelimiter < 0 ? llength + loffset : leftFarDelimiter) - leftDelimiter; + rpart = (rightFarDelimiter < 0 ? rlength + roffset : rightFarDelimiter) - rightDelimiter; + result = Bytes.compareTo(left, leftDelimiter, lpart, right, rightDelimiter, rpart); + if (result != 0) { + return result; + } else { + if (leftDelimiter < 0 && rightDelimiter >= 0) { + return -1; + } else if (rightDelimiter < 0 && leftDelimiter >= 0) { + return 1; + } else if (leftDelimiter < 0 && rightDelimiter < 0) { + return 0; + } + } + // Compare last part of row, the rowid. + leftFarDelimiter++; + rightFarDelimiter++; + result = Bytes.compareTo(left, leftFarDelimiter, llength - (leftFarDelimiter - loffset), + right, rightFarDelimiter, rlength - (rightFarDelimiter - roffset)); + return result; + } + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 58ebc33..c0bb86c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -2241,7 +2241,7 @@ public final class CellUtil { } public static boolean matchingTimestamp(Cell a, Cell b) { - return CellComparator.compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0; + return CellComparatorImpl.COMPARATOR.compareTimestamps(a.getTimestamp(), b.getTimestamp()) == 0; } public static boolean matchingType(Cell a, Cell b) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 66ff72a..a6a1737 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -28,7 +28,6 @@ import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -97,14 +96,14 @@ public class KeyValue implements ExtendedCell { /** * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion * of KeyValue only. - * @deprecated Use {@link CellComparator#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0. + * @deprecated Use {@link CellComparatorImpl#COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0. */ @Deprecated public static final KVComparator COMPARATOR = new KVComparator(); /** * A {@link KVComparator} for hbase:meta catalog table * {@link KeyValue}s. - * @deprecated Use {@link CellComparator#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0. 
+ * @deprecated Use {@link CellComparatorImpl#META_COMPARATOR} instead. Deprecated for hbase 2.0, remove for hbase 3.0. */ @Deprecated public static final KVComparator META_COMPARATOR = new MetaComparator(); @@ -1608,7 +1607,7 @@ public class KeyValue implements ExtendedCell { /** * A {@link KVComparator} for hbase:meta catalog table * {@link KeyValue}s. - * @deprecated : {@link CellComparator#META_COMPARATOR} to be used. Deprecated for hbase 2.0, remove for hbase 3.0. + * @deprecated : {@link CellComparatorImpl#META_COMPARATOR} to be used. Deprecated for hbase 2.0, remove for hbase 3.0. */ @Deprecated public static class MetaComparator extends KVComparator { @@ -1618,7 +1617,7 @@ public class KeyValue implements ExtendedCell { */ @Override public int compare(final Cell left, final Cell right) { - return CellComparator.META_COMPARATOR.compareKeyIgnoresMvcc(left, right); + return CellComparatorImpl.META_COMPARATOR.compareKeyIgnoresMvcc(left, right); } @Override @@ -1723,7 +1722,7 @@ public class KeyValue implements ExtendedCell { * Compare KeyValues. When we compare KeyValues, we only compare the Key * portion. This means two KeyValues with same Key but different Values are * considered the same as far as this Comparator is concerned. - * @deprecated : Use {@link CellComparator}. Deprecated for hbase 2.0, remove for hbase 3.0. + * @deprecated : Use {@link CellComparatorImpl}. Deprecated for hbase 2.0, remove for hbase 3.0. */ @Deprecated public static class KVComparator implements RawComparator, SamePrefixComparator { @@ -1751,7 +1750,7 @@ public class KeyValue implements ExtendedCell { * @return 0 if equal, <0 if left smaller, >0 if right smaller */ protected int compareRowKey(final Cell left, final Cell right) { - return CellComparator.COMPARATOR.compareRows(left, right); + return CellComparatorImpl.COMPARATOR.compareRows(left, right); } /** @@ -1840,7 +1839,7 @@ public class KeyValue implements ExtendedCell { } public int compareOnlyKeyPortion(Cell left, Cell right) { - return CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, right); + return CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, right); } /** @@ -1849,12 +1848,12 @@ public class KeyValue implements ExtendedCell { */ @Override public int compare(final Cell left, final Cell right) { - int compare = CellComparator.COMPARATOR.compare(left, right); + int compare = CellComparatorImpl.COMPARATOR.compare(left, right); return compare; } public int compareTimestamps(final Cell left, final Cell right) { - return CellComparator.compareTimestamps(left, right); + return CellComparatorImpl.COMPARATOR.compareTimestamps(left, right); } /** @@ -1884,7 +1883,7 @@ public class KeyValue implements ExtendedCell { int compareColumns(final Cell left, final short lrowlength, final Cell right, final short rrowlength) { - return CellComparator.compareColumns(left, right); + return CellComparatorImpl.COMPARATOR.compareColumns(left, right); } protected int compareColumns( diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java index d3c622a..0a83543 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java @@ -21,7 +21,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; import org.apache.hadoop.hbase.Cell; -import 
org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockType; @@ -62,9 +62,9 @@ public abstract class AbstractDataBlockEncoder implements DataBlockEncoder { protected abstract static class AbstractEncodedSeeker implements EncodedSeeker { protected HFileBlockDecodingContext decodingCtx; - protected final CellComparator comparator; + protected final CellComparatorImpl comparator; - public AbstractEncodedSeeker(CellComparator comparator, + public AbstractEncodedSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { this.comparator = comparator; this.decodingCtx = decodingCtx; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index bc905e5..04d7061 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -24,7 +24,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; @@ -731,7 +731,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { protected final ObjectIntPair tmpPair = new ObjectIntPair<>(); protected STATE current, previous; - public BufferedEncodedSeeker(CellComparator comparator, + public BufferedEncodedSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { super(comparator, decodingCtx); if (decodingCtx.getHFileContext().isCompressTags()) { @@ -746,7 +746,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { } @Override - public int compareKey(CellComparator comparator, Cell key) { + public int compareKey(CellComparatorImpl comparator, Cell key) { keyOnlyKV.setKey(current.keyBuffer, 0, current.keyLength); return comparator.compareKeyIgnoresMvcc(key, keyOnlyKV); } @@ -880,7 +880,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { qualCommonPrefix); comp = compareCommonQualifierPrefix(seekCell, keyOnlyKV, qualCommonPrefix); if (comp == 0) { - comp = CellComparator.compareTimestamps(seekCell, keyOnlyKV); + comp = CellComparatorImpl.COMPARATOR.compareTimestamps(seekCell, keyOnlyKV); if (comp == 0) { // Compare types. Let the delete types sort ahead of puts; // i.e. 
types diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java index 1547310..8507b23 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -81,7 +81,7 @@ public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java index dbb6adb..d87e6eb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.nio.ByteBuff; @@ -100,7 +100,7 @@ public interface DataBlockEncoder { * @param decodingCtx * @return A newly created seeker. 
*/ - EncodedSeeker createSeeker(CellComparator comparator, + EncodedSeeker createSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx); /** @@ -193,6 +193,6 @@ public interface DataBlockEncoder { * @param key * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater */ - public int compareKey(CellComparator comparator, Cell key); + public int compareKey(CellComparatorImpl comparator, Cell key); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index bd644c1..a8387b0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -382,7 +382,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { private byte[] familyNameWithSize; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index 03cf768..132f82a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -397,7 +397,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { private void decode(boolean isFirst) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index 8edb305..893479e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -195,7 +195,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder { } 
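With the createSeeker signature change above, callers hand the comparator instance to the encoder explicitly. A hedged sketch of what that wiring might look like, assuming the existing DataBlockEncoding.getEncoder() and DataBlockEncoder.newDataBlockDecodingContext(HFileContext) entry points; only the (CellComparatorImpl, HFileBlockDecodingContext) parameter list comes from this patch, the rest is illustrative setup:

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class SeekerWiringSketch {
  public static void main(String[] args) {
    // Assumed entry points for building a decoding context; the file context here is minimal.
    HFileContext meta = new HFileContextBuilder().build();
    DataBlockEncoder encoder = DataBlockEncoding.PREFIX.getEncoder();
    HFileBlockDecodingContext decodingCtx = encoder.newDataBlockDecodingContext(meta);
    // New in this patch: the seeker takes the concrete CellComparatorImpl.
    EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, decodingCtx);
    // The seeker would next be pointed at an encoded block via setCurrentBuffer(...).
    System.out.println(seeker != null);
  }
}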
@Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java index eb783cd..4301408 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java @@ -24,7 +24,7 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -107,7 +107,7 @@ public class RowIndexCodecV1 extends AbstractDataBlockEncoder { dup.limit(sourceAsBuffer.position() + onDiskSize); return dup.slice(); } else { - RowIndexSeekerV1 seeker = new RowIndexSeekerV1(CellComparator.COMPARATOR, + RowIndexSeekerV1 seeker = new RowIndexSeekerV1(CellComparatorImpl.COMPARATOR, decodingCtx); seeker.setCurrentBuffer(new SingleByteBuff(sourceAsBuffer)); List kvs = new ArrayList<>(); @@ -142,7 +142,7 @@ public class RowIndexCodecV1 extends AbstractDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { return new RowIndexSeekerV1(comparator, decodingCtx); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java index 61928a7..7fb2fb2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java @@ -16,7 +16,7 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; @@ -57,7 +57,7 @@ public class RowIndexEncoderV1 { throw new IOException("Key cannot be null or empty"); } if (lastCell != null) { - int keyComp = CellComparator.COMPARATOR.compareRows(lastCell, cell); + int keyComp = CellComparatorImpl.COMPARATOR.compareRows(lastCell, cell); if (keyComp > 0) { throw new IOException("Added a key not lexically larger than" + " previous. 
Current cell = " + cell + ", lastCell = " + lastCell); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java index 75fca82..da0abb4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java @@ -21,7 +21,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -49,7 +49,7 @@ public class RowIndexSeekerV1 extends AbstractEncodedSeeker { private int rowNumber; private ByteBuff rowOffsets = null; - public RowIndexSeekerV1(CellComparator comparator, + public RowIndexSeekerV1(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { super(comparator, decodingCtx); } @@ -243,7 +243,7 @@ public class RowIndexSeekerV1 extends AbstractEncodedSeeker { } @Override - public int compareKey(CellComparator comparator, Cell key) { + public int compareKey(CellComparatorImpl comparator, Cell key) { return comparator.compareKeyIgnoresMvcc(key, current.currentKey); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index e36b1bb..094ac62 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -39,7 +39,7 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.io.RawComparator; @@ -2056,7 +2056,7 @@ public class Bytes implements Comparable { * means that this function can return 2N + 1 different values * ranging from -(N + 1) to N - 1. * @return the index of the block - * @deprecated Use {@link Bytes#binarySearch(Cell[], Cell, CellComparator)} + * @deprecated Use {@link Bytes#binarySearch(Cell[], Cell, CellComparatorImpl)} */ @Deprecated public static int binarySearch(byte[][] arr, Cell key, RawComparator comparator) { @@ -2096,7 +2096,7 @@ public class Bytes implements Comparable { * ranging from -(N + 1) to N - 1. 
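The reworked Bytes.binarySearch(Cell[], Cell, CellComparatorImpl) above can be exercised as follows; the array must already be sorted by the same comparator, and the cells here are hypothetical:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CellBinarySearchSketch {
  public static void main(String[] args) {
    byte[] fam = Bytes.toBytes("f");
    byte[] qual = Bytes.toBytes("q");
    byte[] val = Bytes.toBytes("v");
    // Already in CellComparatorImpl.COMPARATOR order (row1 < row2 < row3).
    Cell[] sorted = new Cell[] {
        new KeyValue(Bytes.toBytes("row1"), fam, qual, 1L, val),
        new KeyValue(Bytes.toBytes("row2"), fam, qual, 1L, val),
        new KeyValue(Bytes.toBytes("row3"), fam, qual, 1L, val) };
    Cell key = new KeyValue(Bytes.toBytes("row2"), fam, qual, 1L, val);
    // An exact match yields the element's index; a miss is encoded as a negative
    // value, per the javadoc above.
    System.out.println(Bytes.binarySearch(sorted, key, CellComparatorImpl.COMPARATOR)); // expected: 1
  }
}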
* @return the index of the block */ - public static int binarySearch(Cell[] arr, Cell key, CellComparator comparator) { + public static int binarySearch(Cell[] arr, Cell key, CellComparatorImpl comparator) { int low = 0; int high = arr.length - 1; while (low <= high) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index 2831b93..c5ceffb 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category; @Category({MiscTests.class, SmallTests.class}) public class TestCellComparator { - private CellComparator comparator = CellComparator.COMPARATOR; + private CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR; byte[] row1 = Bytes.toBytes("row1"); byte[] row2 = Bytes.toBytes("row2"); byte[] row_1_0 = Bytes.toBytes("row10"); @@ -53,7 +53,7 @@ public class TestCellComparator { kv1 = new KeyValue(row1, fam2, qual1, val); kv2 = new KeyValue(row1, fam1, qual1, val); - assertTrue((CellComparator.compareFamilies(kv1, kv2) > 0)); + assertTrue((CellComparatorImpl.COMPARATOR.compareFamilies(kv1, kv2) > 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, val); kv2 = new KeyValue(row1, fam1, qual1, 2l, val); @@ -105,16 +105,16 @@ public class TestCellComparator { kv = new KeyValue(r2, f1, q1, v); buffer = ByteBuffer.wrap(kv.getBuffer()); Cell bbCell2 = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertEquals(0, CellComparator.compareColumns(bbCell1, bbCell2)); - assertEquals(0, CellComparator.compareColumns(bbCell1, kv)); + assertEquals(0, CellComparatorImpl.COMPARATOR.compareColumns(bbCell1, bbCell2)); + assertEquals(0, CellComparatorImpl.COMPARATOR.compareColumns(bbCell1, kv)); kv = new KeyValue(r2, f1, q2, v); buffer = ByteBuffer.wrap(kv.getBuffer()); Cell bbCell3 = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); - assertEquals(0, CellComparator.compareFamilies(bbCell2, bbCell3)); - assertTrue(CellComparator.compareQualifiers(bbCell2, bbCell3) < 0); - assertTrue(CellComparator.compareColumns(bbCell2, bbCell3) < 0); + assertEquals(0, CellComparatorImpl.COMPARATOR.compareFamilies(bbCell2, bbCell3)); + assertTrue(CellComparatorImpl.COMPARATOR.compareQualifiers(bbCell2, bbCell3) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareColumns(bbCell2, bbCell3) < 0); - assertEquals(0, CellComparator.COMPARATOR.compareRows(bbCell2, bbCell3)); - assertTrue(CellComparator.COMPARATOR.compareRows(bbCell1, bbCell2) < 0); + assertEquals(0, CellComparatorImpl.COMPARATOR.compareRows(bbCell2, bbCell3)); + assertTrue(CellComparatorImpl.COMPARATOR.compareRows(bbCell1, bbCell2) < 0); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 562c008..3f0b8de 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -108,24 +108,24 @@ public class TestKeyValue extends TestCase { final byte [] qf = Bytes.toBytes("umn"); KeyValue aaa = new KeyValue(a, fam, qf, a); KeyValue bbb = new KeyValue(b, fam, qf, b); - assertTrue(CellComparator.COMPARATOR.compare(aaa, bbb) < 0); - assertTrue(CellComparator.COMPARATOR.compare(bbb, aaa) > 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, bbb) < 0); + 
assertTrue(CellComparatorImpl.COMPARATOR.compare(bbb, aaa) > 0); // Compare breaks if passed same ByteBuffer as both left and right arguments. - assertTrue(CellComparator.COMPARATOR.compare(bbb, bbb) == 0); - assertTrue(CellComparator.COMPARATOR.compare(aaa, aaa) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(bbb, bbb) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, aaa) == 0); // Do compare with different timestamps. aaa = new KeyValue(a, fam, qf, 1, a); bbb = new KeyValue(a, fam, qf, 2, a); - assertTrue(CellComparator.COMPARATOR.compare(aaa, bbb) > 0); - assertTrue(CellComparator.COMPARATOR.compare(bbb, aaa) < 0); - assertTrue(CellComparator.COMPARATOR.compare(aaa, aaa) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, bbb) > 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(bbb, aaa) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, aaa) == 0); // Do compare with different types. Higher numbered types -- Delete // should sort ahead of lower numbers; i.e. Put aaa = new KeyValue(a, fam, qf, 1, KeyValue.Type.Delete, a); bbb = new KeyValue(a, fam, qf, 1, a); - assertTrue(CellComparator.COMPARATOR.compare(aaa, bbb) < 0); - assertTrue(CellComparator.COMPARATOR.compare(bbb, aaa) > 0); - assertTrue(CellComparator.COMPARATOR.compare(aaa, aaa) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, bbb) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(bbb, aaa) > 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(aaa, aaa) == 0); } public void testMoreComparisons() throws Exception { @@ -136,7 +136,7 @@ public class TestKeyValue extends TestCase { Bytes.toBytes("TestScanMultipleVersions,row_0500,1236020145502"), now); KeyValue bbb = new KeyValue( Bytes.toBytes("TestScanMultipleVersions,,99999999999999"), now); - CellComparator c = CellComparator.META_COMPARATOR; + CellComparatorImpl c = CellComparatorImpl.META_COMPARATOR; assertTrue(c.compare(bbb, aaa) < 0); KeyValue aaaa = new KeyValue(Bytes.toBytes("TestScanMultipleVersions,,1236023996656"), @@ -151,13 +151,13 @@ public class TestKeyValue extends TestCase { Bytes.toBytes("info"), Bytes.toBytes("regioninfo"), 1236034574912L, (byte[])null); assertTrue(c.compare(x, y) < 0); - comparisons(CellComparator.META_COMPARATOR); - comparisons(CellComparator.COMPARATOR); - metacomparisons(CellComparator.META_COMPARATOR); + comparisons(CellComparatorImpl.META_COMPARATOR); + comparisons(CellComparatorImpl.COMPARATOR); + metacomparisons(CellComparatorImpl.META_COMPARATOR); } public void testMetaComparatorTableKeysWithCommaOk() { - CellComparator c = CellComparator.META_COMPARATOR; + CellComparatorImpl c = CellComparatorImpl.META_COMPARATOR; long now = System.currentTimeMillis(); // meta keys values are not quite right. A users can enter illegal values // from shell when scanning meta. 
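As the test above illustrates, hbase:meta row keys must go through META_COMPARATOR so that the comma-delimited table name, start key and region id are compared piecewise rather than as raw bytes. A sketch with row keys modeled on the ones in the test (the table name here is made up):

import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class MetaRowOrderSketch {
  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    KeyValue emptyStartKey =
        new KeyValue(Bytes.toBytes("TestTable,,99999999999999"), now);
    KeyValue laterRegion =
        new KeyValue(Bytes.toBytes("TestTable,row_0500,1236020145502"), now);
    // For the same table, the region with the empty start key sorts first.
    System.out.println(
        CellComparatorImpl.META_COMPARATOR.compare(emptyStartKey, laterRegion) < 0); // true
  }
}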
@@ -178,17 +178,17 @@ public class TestKeyValue extends TestCase { Bytes.toBytes("fam"), Bytes.toBytes(""), Long.MAX_VALUE, (byte[])null); KeyValue rowB = new KeyValue(Bytes.toBytes("testtable,www.hbase.org/%20,99999"), Bytes.toBytes("fam"), Bytes.toBytes(""), Long.MAX_VALUE, (byte[])null); - assertTrue(CellComparator.META_COMPARATOR.compare(rowA, rowB) < 0); + assertTrue(CellComparatorImpl.META_COMPARATOR.compare(rowA, rowB) < 0); rowA = new KeyValue(Bytes.toBytes("testtable,,1234"), Bytes.toBytes("fam"), Bytes.toBytes(""), Long.MAX_VALUE, (byte[])null); rowB = new KeyValue(Bytes.toBytes("testtable,$www.hbase.org/,99999"), Bytes.toBytes("fam"), Bytes.toBytes(""), Long.MAX_VALUE, (byte[])null); - assertTrue(CellComparator.META_COMPARATOR.compare(rowA, rowB) < 0); + assertTrue(CellComparatorImpl.META_COMPARATOR.compare(rowA, rowB) < 0); } - private void metacomparisons(final CellComparator c) { + private void metacomparisons(final CellComparatorImpl c) { long now = System.currentTimeMillis(); assertTrue(c.compare(new KeyValue( Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now), @@ -205,7 +205,7 @@ public class TestKeyValue extends TestCase { Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now)) > 0); } - private void comparisons(final CellComparator c) { + private void comparisons(final CellComparatorImpl c) { long now = System.currentTimeMillis(); assertTrue(c.compare(new KeyValue( Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,1"), now), @@ -222,7 +222,7 @@ public class TestKeyValue extends TestCase { } public void testBinaryKeys() throws Exception { - Set set = new TreeSet<>(CellComparator.COMPARATOR); + Set set = new TreeSet<>(CellComparatorImpl.COMPARATOR); final byte [] fam = Bytes.toBytes("col"); final byte [] qf = Bytes.toBytes("umn"); final byte [] nb = new byte[0]; @@ -248,7 +248,7 @@ public class TestKeyValue extends TestCase { } assertTrue(assertion); // Make set with good comparator - set = new TreeSet<>(CellComparator.META_COMPARATOR); + set = new TreeSet<>(CellComparatorImpl.META_COMPARATOR); Collections.addAll(set, keys); count = 0; for (KeyValue k: set) { @@ -270,7 +270,7 @@ public class TestKeyValue extends TestCase { private final byte[] qualA = Bytes.toBytes("qfA"); private final byte[] qualB = Bytes.toBytes("qfB"); - private void assertKVLess(CellComparator c, + private void assertKVLess(CellComparatorImpl c, KeyValue less, KeyValue greater) { int cmp = c.compare(less,greater); @@ -279,7 +279,7 @@ public class TestKeyValue extends TestCase { assertTrue(cmp > 0); } - private void assertKVLessWithoutRow(CellComparator c, int common, KeyValue less, + private void assertKVLessWithoutRow(CellComparatorImpl c, int common, KeyValue less, KeyValue greater) { int cmp = c.compare(less, greater); assertTrue(cmp < 0); @@ -288,7 +288,7 @@ public class TestKeyValue extends TestCase { } public void testCompareWithoutRow() { - final CellComparator c = CellComparator.COMPARATOR; + final CellComparatorImpl c = CellComparatorImpl.COMPARATOR; byte[] row = Bytes.toBytes("row"); byte[] fa = Bytes.toBytes("fa"); @@ -335,7 +335,7 @@ public class TestKeyValue extends TestCase { } public void testFirstLastOnRow() { - final CellComparator c = CellComparator.COMPARATOR; + final CellComparatorImpl c = CellComparatorImpl.COMPARATOR; long ts = 1; byte[] bufferA = new byte[128]; int offsetA = 0; @@ -499,7 +499,7 @@ public class TestKeyValue extends TestCase { } public void testMetaKeyComparator() { - CellComparator c = 
CellComparator.META_COMPARATOR; + CellComparatorImpl c = CellComparatorImpl.META_COMPARATOR; long now = System.currentTimeMillis(); KeyValue a = new KeyValue(Bytes.toBytes("table1"), now); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java index 0f3ccb4..fb9205d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java @@ -26,7 +26,7 @@ import java.util.Random; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Tag; @@ -287,7 +287,7 @@ public class RedundantKVGenerator { } } - Collections.sort(result, CellComparator.COMPARATOR); + Collections.sort(result, CellComparatorImpl.COMPARATOR); return result; } @@ -383,7 +383,7 @@ public class RedundantKVGenerator { } } - Collections.sort(result, CellComparator.COMPARATOR); + Collections.sort(result, CellComparatorImpl.COMPARATOR); return result; } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index 246cb5b..ef26274 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -37,7 +37,7 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.KeyValue; @@ -84,7 +84,7 @@ public class IntegrationTestImportTsv extends Configured implements Tool { public TestName name = new TestName(); protected static final Set simple_expected = - new TreeSet(CellComparator.COMPARATOR) { + new TreeSet(CellComparatorImpl.COMPARATOR) { private static final long serialVersionUID = 1L; { byte[] family = Bytes.toBytes("d"); @@ -163,7 +163,7 @@ public class IntegrationTestImportTsv extends Configured implements Tool { assertTrue( format("Scan produced surprising result. 
expected: <%s>, actual: %s", expected, actual), - CellComparator.COMPARATOR.compare(expected, actual) == 0); + CellComparatorImpl.COMPARATOR.compare(expected, actual) == 0); } } assertFalse("Did not consume all expected values.", expectedIt.hasNext()); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java index c33ee15..5c2b41f 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.MapReduceCell; @@ -42,7 +42,7 @@ public class CellSortReducer protected void reduce(ImmutableBytesWritable row, Iterable kvs, Reducer.Context context) throws java.io.IOException, InterruptedException { - TreeSet map = new TreeSet<>(CellComparator.COMPARATOR); + TreeSet map = new TreeSet<>(CellComparatorImpl.COMPARATOR); for (Cell kv : kvs) { try { map.add(CellUtil.deepClone(kv)); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index 20b2d42..55eb22f 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -46,7 +46,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; @@ -402,12 +402,12 @@ public class HFileOutputFormat2 wl.writer = new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs) .withOutputDir(familydir).withBloomType(bloomType) - .withComparator(CellComparator.COMPARATOR).withFileContext(hFileContext).build(); + .withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext).build(); } else { wl.writer = new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs)) .withOutputDir(familydir).withBloomType(bloomType) - .withComparator(CellComparator.COMPARATOR).withFileContext(hFileContext) + .withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext) .withFavoredNodes(favoredNodes).build(); } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index 61835e4..acc3c62 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -40,7 +40,7 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import 
org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; @@ -147,7 +147,7 @@ public class Import extends Configured implements Tool { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_COMPARETO_USE_OBJECT_EQUALS", justification="This is wrong, yes, but we should be purging Writables, not fixing them") public int compareTo(CellWritableComparable o) { - return CellComparator.COMPARATOR.compare(this.kv, ((CellWritableComparable)o).kv); + return CellComparatorImpl.COMPARATOR.compare(this.kv, ((CellWritableComparable)o).kv); } public static class CellWritableComparator extends WritableComparator { diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java index bb935c3..6c36302 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java @@ -27,7 +27,7 @@ import java.util.TreeSet; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; @@ -77,7 +77,7 @@ public class PutSortReducer extends "putsortreducer.row.threshold", 1L * (1<<30)); Iterator iter = puts.iterator(); while (iter.hasNext()) { - TreeSet map = new TreeSet<>(CellComparator.COMPARATOR); + TreeSet map = new TreeSet<>(CellComparatorImpl.COMPARATOR); long curSize = 0; // stop at the end or the RAM threshold List tags = new ArrayList<>(); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java index c72a0c3..3f5cc69 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java @@ -29,7 +29,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; @@ -52,7 +52,6 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; public class SyncTable extends Configured implements Tool { @@ -588,18 +587,18 @@ public class SyncTable extends Configured implements Tool { return -1; // target missing cell } - int result = CellComparator.compareFamilies(c1, c2); + int result = CellComparatorImpl.COMPARATOR.compareFamilies(c1, c2); if (result != 0) { return result; } - result = CellComparator.compareQualifiers(c1, c2); + result = CellComparatorImpl.COMPARATOR.compareQualifiers(c1, c2); if (result != 0) { return result; } // note timestamp comparison is inverted - more recent cells first - return CellComparator.compareTimestamps(c1, c2); + return CellComparatorImpl.COMPARATOR.compareTimestamps(c1, c2); } @Override diff 
--git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java index 2aaa4eb..0f47032 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java @@ -27,7 +27,7 @@ import java.util.TreeSet; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; @@ -144,7 +144,7 @@ public class TextSortReducer extends "reducer.row.threshold", 1L * (1<<30)); Iterator iter = lines.iterator(); while (iter.hasNext()) { - Set kvs = new TreeSet<>(CellComparator.COMPARATOR); + Set kvs = new TreeSet<>(CellComparatorImpl.COMPARATOR); long curSize = 0; // stop at the end or the RAM threshold while (iter.hasNext() && curSize < threshold) { diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index 4d8673c..e39afa8 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -25,9 +25,9 @@ import java.nio.ByteBuffer; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.CellComparator.MetaCellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory; import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher; @@ -153,7 +153,7 @@ public class PrefixTreeCodec implements DataBlockEncoder { * the way to this point. 
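
Aside (illustration only, not part of the patch): the sort-reducer hunks above all funnel cells into a TreeSet ordered by the CellComparatorImpl.COMPARATOR singleton, and the TestKeyValue hunks order hbase:meta style keys with META_COMPARATOR. A minimal standalone sketch of that call shape follows; the class name, row keys and values are made up for the example.

import java.util.TreeSet;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorUsageSketch {
  public static void main(String[] args) {
    // Cells ordered by the default comparator singleton, as in the sort-reducer hunks above.
    TreeSet<Cell> cells = new TreeSet<>(CellComparatorImpl.COMPARATOR);
    cells.add(new KeyValue(Bytes.toBytes("row-b"), Bytes.toBytes("fam"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v2")));
    cells.add(new KeyValue(Bytes.toBytes("row-a"), Bytes.toBytes("fam"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v1")));
    System.out.println(Bytes.toString(CellUtil.cloneRow(cells.first()))); // prints row-a

    // hbase:meta style row keys are ordered with the META_COMPARATOR singleton instead.
    KeyValue a = new KeyValue(Bytes.toBytes("testtable,,1234"), 1L);
    KeyValue b = new KeyValue(Bytes.toBytes("testtable,$www.hbase.org/,99999"), 1L);
    System.out.println(CellComparatorImpl.META_COMPARATOR.compare(a, b) < 0); // prints true
  }
}
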
*/ @Override - public EncodedSeeker createSeeker(CellComparator comparator, + public EncodedSeeker createSeeker(CellComparatorImpl comparator, HFileBlockDecodingContext decodingCtx) { if (comparator instanceof MetaCellComparator) { throw new IllegalArgumentException( diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java index 4fbb8a6..98608d6 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java @@ -22,7 +22,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.SettableSequenceId; @@ -207,7 +207,7 @@ public class PrefixTreeSeeker implements EncodedSeeker { } @Override - public int compareKey(CellComparator comparator, Cell key) { + public int compareKey(CellComparatorImpl comparator, Cell key) { return comparator.compare(key, ptSearcher.current()); } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java index e63bb97..bda5b3d 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.decode; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.util.ObjectIntPair; public class PrefixTreeCell extends ByteBufferCell implements SettableSequenceId, Comparable { // Create a reference here? 
Can be removed too - protected CellComparator comparator = CellComparator.COMPARATOR; + protected CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR; /********************** static **********************/ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java index 255376a..5c4d008 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.row.data; import java.util.Collections; import java.util.List; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; @@ -50,7 +50,7 @@ public class TestRowDataNumberStrings extends BaseTestRowData{ d.add(new KeyValue(row, family, column, 0L, Type.Put, value)); } - Collections.sort(d, CellComparator.COMPARATOR); + Collections.sort(d, CellComparatorImpl.COMPARATOR); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java index efc21c6..7f72151 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterBase.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.io.hfile; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.BloomFilterBase; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; @InterfaceAudience.Private public class CompoundBloomFilterBase implements BloomFilterBase { @@ -51,7 +51,7 @@ public class CompoundBloomFilterBase implements BloomFilterBase { /** Hash function type to use, as defined in {@link org.apache.hadoop.hbase.util.Hash} */ protected int hashType; /** Comparator used to compare Bloom filter keys */ - protected CellComparator comparator; + protected CellComparatorImpl comparator; @Override public long getMaxKeys() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java index 3242b73..57ed09a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java @@ -26,7 +26,7 @@ import java.util.Queue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -98,7 +98,7 @@ public class CompoundBloomFilterWriter extends CompoundBloomFilterBase */ public CompoundBloomFilterWriter(int chunkByteSizeHint, float errorRate, int hashType, int maxFold, boolean cacheOnWrite, - 
CellComparator comparator, BloomType bloomType) { + CellComparatorImpl comparator, BloomType bloomType) { chunkByteSize = BloomFilterUtil.computeFoldableByteSize( chunkByteSizeHint * 8L, maxFold); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java index d82dd17..4c5cc14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java @@ -27,8 +27,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.CellComparator.MetaCellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; +import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression; @@ -109,7 +109,7 @@ public class FixedFileTrailer { /** Raw key comparator class name in version 3 */ // We could write the actual class name from 2.0 onwards and handle BC - private String comparatorClassName = CellComparator.COMPARATOR.getClass().getName(); + private String comparatorClassName = CellComparatorImpl.COMPARATOR.getClass().getName(); /** The encryption key */ private byte[] encryptionKey; @@ -539,12 +539,12 @@ public class FixedFileTrailer { return minorVersion; } - public void setComparatorClass(Class klass) { + public void setComparatorClass(Class klass) { // Is the comparator instantiable? try { // If null, it should be the Bytes.BYTES_RAWCOMPARATOR if (klass != null) { - CellComparator comp = klass.newInstance(); + CellComparatorImpl comp = klass.newInstance(); // if the name wasn't one of the legacy names, maybe its a legit new // kind of comparator. comparatorClassName = klass.getName(); @@ -556,12 +556,12 @@ public class FixedFileTrailer { } @SuppressWarnings("unchecked") - private static Class getComparatorClass(String comparatorClassName) + private static Class getComparatorClass(String comparatorClassName) throws IOException { - Class comparatorKlass; + Class comparatorKlass; if (comparatorClassName.equals(KeyValue.COMPARATOR.getLegacyKeyComparatorName()) || comparatorClassName.equals(KeyValue.COMPARATOR.getClass().getName())) { - comparatorKlass = CellComparator.class; + comparatorKlass = CellComparatorImpl.class; } else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName()) || comparatorClassName.equals(KeyValue.META_COMPARATOR.getClass().getName())) { comparatorKlass = MetaCellComparator.class; @@ -573,7 +573,7 @@ public class FixedFileTrailer { } else { // if the name wasn't one of the legacy names, maybe its a legit new kind of comparator. try { - comparatorKlass = (Class) Class.forName(comparatorClassName); + comparatorKlass = (Class) Class.forName(comparatorClassName); } catch (ClassNotFoundException e) { throw new IOException(e); } @@ -581,10 +581,10 @@ public class FixedFileTrailer { return comparatorKlass; } - public static CellComparator createComparator( + public static CellComparatorImpl createComparator( String comparatorClassName) throws IOException { try { - Class comparatorClass = getComparatorClass(comparatorClassName); + Class comparatorClass = getComparatorClass(comparatorClassName); return comparatorClass != null ? 
comparatorClass.newInstance() : null; } catch (InstantiationException e) { throw new IOException("Comparator class " + comparatorClassName + @@ -595,7 +595,7 @@ public class FixedFileTrailer { } } - CellComparator createComparator() throws IOException { + CellComparatorImpl createComparator() throws IOException { expectAtLeastMajorVersion(2); return createComparator(comparatorClassName); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 35b741d..9969bf2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -48,7 +48,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.fs.HFileSystem; @@ -275,8 +275,8 @@ public class HFile { protected FileSystem fs; protected Path path; protected FSDataOutputStream ostream; - protected CellComparator comparator = - CellComparator.COMPARATOR; + protected CellComparatorImpl comparator = + CellComparatorImpl.COMPARATOR; protected InetSocketAddress[] favoredNodes; private HFileContext fileContext; protected boolean shouldDropBehind = false; @@ -300,7 +300,7 @@ public class HFile { return this; } - public WriterFactory withComparator(CellComparator comparator) { + public WriterFactory withComparator(CellComparatorImpl comparator) { Preconditions.checkNotNull(comparator); this.comparator = comparator; return this; @@ -427,7 +427,7 @@ public class HFile { */ String getName(); - CellComparator getComparator(); + CellComparatorImpl getComparator(); HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index 24bc286..55125a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -36,7 +36,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue; @@ -187,7 +187,7 @@ public class HFileBlockIndex { } @Override - public int rootBlockContainingKey(byte[] key, int offset, int length, CellComparator comp) { + public int rootBlockContainingKey(byte[] key, int offset, int length, CellComparatorImpl comp) { int pos = Bytes.binarySearch(blockKeys, key, offset, length); // pos is between -(blockKeys.length + 1) to blockKeys.length - 1, see // binarySearch's javadoc. @@ -241,15 +241,15 @@ public class HFileBlockIndex { /** Pre-computed mid-key */ private AtomicReference midKey = new AtomicReference<>(); /** Needed doing lookup on blocks. 
*/ - private CellComparator comparator; + private CellComparatorImpl comparator; - public CellBasedKeyBlockIndexReader(final CellComparator c, final int treeLevel, + public CellBasedKeyBlockIndexReader(final CellComparatorImpl c, final int treeLevel, final CachingBlockReader cachingBlockReader) { this(c, treeLevel); this.cachingBlockReader = cachingBlockReader; } - public CellBasedKeyBlockIndexReader(final CellComparator c, final int treeLevel) { + public CellBasedKeyBlockIndexReader(final CellComparatorImpl c, final int treeLevel) { // Can be null for METAINDEX block comparator = c; searchTreeLevel = treeLevel; @@ -468,7 +468,7 @@ public class HFileBlockIndex { @Override public int rootBlockContainingKey(final byte[] key, int offset, int length, - CellComparator comp) { + CellComparatorImpl comp) { // This should always be called with Cell not with a byte[] key throw new UnsupportedOperationException("Cannot find for a key containing plain byte " + "array. Only cell based keys can be searched for"); @@ -648,7 +648,7 @@ public class HFileBlockIndex { // type Bytes.BYTES_RAWCOMPARATOR would be enough. For the ROW_COL bloom case we need the // CellComparator. public abstract int rootBlockContainingKey(final byte[] key, int offset, int length, - CellComparator comp); + CellComparatorImpl comp); /** * Finds the root-level index block containing the given key. @@ -726,7 +726,7 @@ public class HFileBlockIndex { * @throws IOException */ static int binarySearchNonRootIndex(Cell key, ByteBuff nonRootIndex, - CellComparator comparator) { + CellComparatorImpl comparator) { int numEntries = nonRootIndex.getIntAfterPosition(0); int low = 0; @@ -816,7 +816,7 @@ public class HFileBlockIndex { * */ static int locateNonRootIndexEntry(ByteBuff nonRootBlock, Cell key, - CellComparator comparator) { + CellComparatorImpl comparator) { int entryIndex = binarySearchNonRootIndex(key, nonRootBlock, comparator); if (entryIndex != -1) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 4524350..2889d31 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -53,7 +53,7 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -377,7 +377,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { do { Cell cell = scanner.getCell(); if (row != null && row.length != 0) { - int result = CellComparator.COMPARATOR.compareRows(cell, row, 0, row.length); + int result = CellComparatorImpl.COMPARATOR.compareRows(cell, row, 0, row.length); if (result > 0) { break; } else if (result < 0) { @@ -406,7 +406,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { } // check if rows are in order if (checkRow && pCell != null) { - if (CellComparator.COMPARATOR.compareRows(pCell, cell) > 0) { + if (CellComparatorImpl.COMPARATOR.compareRows(pCell, cell) > 0) { err.println("WARNING, previous row is greater then" + " current row\n\tfilename -> " + file + "\n\tprevious -> " + 
CellUtil.getCellKeyAsString(pCell) + "\n\tcurrent -> " @@ -422,7 +422,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { + "\n\tfilename -> " + file + "\n\tkeyvalue -> " + CellUtil.getCellKeyAsString(cell)); } - if (pCell != null && CellComparator.compareFamilies(pCell, cell) != 0) { + if (pCell != null && CellComparatorImpl.COMPARATOR.compareFamilies(pCell, cell) != 0) { err.println("WARNING, previous kv has different family" + " compared to current key\n\tfilename -> " + file + "\n\tprevious -> " + CellUtil.getCellKeyAsString(pCell) @@ -604,7 +604,7 @@ public class HFilePrettyPrinter extends Configured implements Tool { public void collect(Cell cell) { valLen.update(cell.getValueLength()); if (prevCell != null && - CellComparator.COMPARATOR.compareRows(prevCell, cell) != 0) { + CellComparatorImpl.COMPARATOR.compareRows(prevCell, cell) != 0) { // new row collectRow(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java index 039f499..f9916ad 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java @@ -33,7 +33,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -104,7 +104,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { private int avgValueLen = -1; /** Key comparator */ - private CellComparator comparator = CellComparator.COMPARATOR; + private CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR; /** Size of this file. 
*/ private final long fileSize; @@ -421,7 +421,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { /** @return comparator */ @Override - public CellComparator getComparator() { + public CellComparatorImpl getComparator() { return comparator; } @@ -1226,7 +1226,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { return ByteBufferUtils.toStringBinary(getValue()); } - public int compareKey(CellComparator comparator, Cell key) { + public int compareKey(CellComparatorImpl comparator, Cell key) { blockBuffer.asSubByteBuffer(blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen, pair); this.bufBackedKeyOnlyKv.setKey(pair.getFirst(), pair.getSecond(), currKeyLen); return comparator.compareKeyIgnoresMvcc(key, this.bufBackedKeyOnlyKv); @@ -1729,7 +1729,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { return seeker.seekToKeyInBlock(key, seekBefore); } - public int compareKey(CellComparator comparator, Cell key) { + public int compareKey(CellComparatorImpl comparator, Cell key) { return seeker.compareKey(comparator, key); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java index 5b25bed..00b93b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java @@ -35,11 +35,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.CellComparator.MetaCellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -94,7 +94,7 @@ public class HFileWriterImpl implements HFile.Writer { protected long totalUncompressedBytes = 0; /** Key comparator. Used to ensure we write in order. */ - protected final CellComparator comparator; + protected final CellComparatorImpl comparator; /** Meta block names. */ protected List metaNames = new ArrayList<>(); @@ -165,7 +165,7 @@ public class HFileWriterImpl implements HFile.Writer { public HFileWriterImpl(final Configuration conf, CacheConfig cacheConf, Path path, FSDataOutputStream outputStream, - CellComparator comparator, HFileContext fileContext) { + CellComparatorImpl comparator, HFileContext fileContext) { this.outputStream = outputStream; this.path = path; this.name = path != null ? path.getName() : outputStream.toString(); @@ -176,7 +176,7 @@ public class HFileWriterImpl implements HFile.Writer { } else { this.blockEncoder = NoOpDataBlockEncoder.INSTANCE; } - this.comparator = comparator != null? comparator: CellComparator.COMPARATOR; + this.comparator = comparator != null? comparator: CellComparatorImpl.COMPARATOR; closeOutputStream = path != null; this.cacheConf = cacheConf; @@ -360,7 +360,7 @@ public class HFileWriterImpl implements HFile.Writer { * @param right * @return A cell that sorts between left and right. 
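
Aside (illustration only, not part of the patch): in the reader and writer hunks here, the component-wise checks that used to be static helpers become instance calls on the comparator, e.g. comparator.compareFamilies(left, right) and comparator.compareQualifiers(left, right) inside getMidpoint below. A minimal sketch of those calls; the class name, cells and values are made up.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComponentCompareSketch {
  public static void main(String[] args) {
    CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR;
    Cell left = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("q1"), 2L, Bytes.toBytes("v"));
    Cell right = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("q2"), 1L, Bytes.toBytes("v"));
    // Same row and family, so both component comparisons return 0.
    System.out.println(comparator.compareRows(left, right));     // 0
    System.out.println(comparator.compareFamilies(left, right)); // 0
    // Qualifiers differ: "q1" sorts before "q2".
    System.out.println(comparator.compareQualifiers(left, right) < 0); // true
    // Timestamp order is inverted: the newer cell (ts=2) sorts first.
    System.out.println(comparator.compareTimestamps(left, right) < 0); // true
  }
}
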
*/ - public static Cell getMidpoint(final CellComparator comparator, final Cell left, + public static Cell getMidpoint(final CellComparatorImpl comparator, final Cell left, final Cell right) { // TODO: Redo so only a single pass over the arrays rather than one to // compare and then a second composing midpoint. @@ -399,7 +399,7 @@ public class HFileWriterImpl implements HFile.Writer { return CellUtil.createFirstOnRow(midRow); } // Rows are same. Compare on families. - diff = CellComparator.compareFamilies(left, right); + diff = comparator.compareFamilies(left, right); if (diff > 0) { throw new IllegalArgumentException("Left family sorts after right family; left=" + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right)); @@ -421,7 +421,7 @@ public class HFileWriterImpl implements HFile.Writer { return CellUtil.createFirstOnRowFamily(right, midRow, 0, midRow.length); } // Families are same. Compare on qualifiers. - diff = CellComparator.compareQualifiers(left, right); + diff = comparator.compareQualifiers(left, right); if (diff > 0) { throw new IllegalArgumentException("Left qualifier sorts after right qualifier; left=" + CellUtil.getCellKeyAsString(left) + ", right=" + CellUtil.getCellKeyAsString(right)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index e87cb3d..c66c571 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -681,7 +681,7 @@ public final class MobUtils { StoreFileWriter w = new StoreFileWriter.Builder(conf, writerCacheConf, fs) .withFilePath(path) - .withComparator(CellComparator.COMPARATOR).withBloomType(bloomType) + .withComparator(CellComparatorImpl.COMPARATOR).withBloomType(bloomType) .withMaxKeyCount(maxKeyCount).withFileContext(hFileContext).build(); return w; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index 1fc2902..92c7cef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -48,7 +48,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -812,7 +812,7 @@ public class PartitionedMobCompactor extends MobCompactor { List scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact, false, true, false, false, HConstants.LATEST_TIMESTAMP); long ttl = HStore.determineTTLFromFamily(column); - ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparator.COMPARATOR); + ScanInfo 
scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparatorImpl.COMPARATOR); return new StoreScanner(scanInfo, scanType, scanners); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java index 4cb9ed1..cfce6cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java @@ -27,7 +27,7 @@ import java.util.SortedSet; import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.UnexpectedStateException; @@ -44,7 +44,7 @@ public abstract class AbstractMemStore implements MemStore { private static final long NO_SNAPSHOT_ID = -1; private final Configuration conf; - private final CellComparator comparator; + private final CellComparatorImpl comparator; // active segment absorbs write operations protected volatile MutableSegment active; @@ -74,7 +74,7 @@ public abstract class AbstractMemStore implements MemStore { return order - 1; } - protected AbstractMemStore(final Configuration conf, final CellComparator c) { + protected AbstractMemStore(final Configuration conf, final CellComparatorImpl c) { this.conf = conf; this.comparator = c; resetActive(); @@ -300,7 +300,7 @@ public abstract class AbstractMemStore implements MemStore { */ protected abstract long heapSize(); - protected CellComparator getComparator() { + protected CellComparatorImpl getComparator() { return comparator; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayImmutableSegment.java index a4b8c7b..35417c5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellArrayImmutableSegment.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ClassSize; @@ -41,7 +41,7 @@ public class CellArrayImmutableSegment extends ImmutableSegment { * list of older ImmutableSegments. * The given iterator returns the Cells that "survived" the compaction. 
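
Aside (illustration only, not part of the patch): the memstore and segment hunks in this area all take the comparator through the constructor and hand it to the backing cell map, as the CellSet change further down does with a ConcurrentSkipListMap. A tiny hypothetical stand-in for that pairing; the class name and data are invented, only the injection pattern mirrors the hunks.

import java.util.concurrent.ConcurrentSkipListMap;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical stand-in for the Segment/CellSet pairing: the comparator is injected
// once at construction time and backs the skip-list ordering of the cells.
public class MiniSegmentSketch {
  private final ConcurrentSkipListMap<Cell, Cell> cells;

  MiniSegmentSketch(CellComparatorImpl comparator) {
    this.cells = new ConcurrentSkipListMap<>(comparator);
  }

  void add(Cell cell) {
    cells.put(cell, cell);
  }

  Cell first() {
    return cells.firstKey();
  }

  public static void main(String[] args) {
    MiniSegmentSketch segment = new MiniSegmentSketch(CellComparatorImpl.COMPARATOR);
    segment.add(new KeyValue(Bytes.toBytes("b"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v")));
    segment.add(new KeyValue(Bytes.toBytes("a"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v")));
    System.out.println(Bytes.toString(CellUtil.cloneRow(segment.first()))); // prints a
  }
}
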
*/ - protected CellArrayImmutableSegment(CellComparator comparator, MemStoreSegmentsIterator iterator, + protected CellArrayImmutableSegment(CellComparatorImpl comparator, MemStoreSegmentsIterator iterator, MemStoreLAB memStoreLAB, int numOfCells, MemStoreCompactor.Action action) { super(null, comparator, memStoreLAB); // initiailize the CellSet with NULL incSize(0, DEEP_OVERHEAD_CAM); @@ -125,7 +125,7 @@ public class CellArrayImmutableSegment extends ImmutableSegment { segmentScanner.close(); } // build the immutable CellSet - CellArrayMap cam = new CellArrayMap(CellComparator.COMPARATOR, cells, 0, idx, false); + CellArrayMap cam = new CellArrayMap(CellComparatorImpl.COMPARATOR, cells, 0, idx, false); this.setCellSet(oldCellSet, new CellSet(cam)); // update the CellSet of this Segment } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java index 62b62da..772e96c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -47,7 +47,7 @@ public class CellChunkImmutableSegment extends ImmutableSegment { * of a list of older ImmutableSegments. * The given iterator returns the Cells that "survived" the compaction. 
*/ - protected CellChunkImmutableSegment(CellComparator comparator, MemStoreSegmentsIterator iterator, + protected CellChunkImmutableSegment(CellComparatorImpl comparator, MemStoreSegmentsIterator iterator, MemStoreLAB memStoreLAB, int numOfCells, MemStoreCompactor.Action action) { super(null, comparator, memStoreLAB); // initialize the CellSet with NULL incSize(0, DEEP_OVERHEAD_CCM); // initiate the heapSize with the size of the segment metadata @@ -124,7 +124,7 @@ public class CellChunkImmutableSegment extends ImmutableSegment { } // build the immutable CellSet CellChunkMap ccm = - new CellChunkMap(CellComparator.COMPARATOR,chunks,0,numOfCellsAfterCompaction,false); + new CellChunkMap(CellComparatorImpl.COMPARATOR,chunks,0,numOfCellsAfterCompaction,false); this.setCellSet(null, new CellSet(ccm)); // update the CellSet of this Segment } @@ -167,7 +167,7 @@ public class CellChunkImmutableSegment extends ImmutableSegment { segmentScanner.close(); } - CellChunkMap ccm = new CellChunkMap(CellComparator.COMPARATOR,chunks,0,numOfCells,false); + CellChunkMap ccm = new CellChunkMap(CellComparatorImpl.COMPARATOR,chunks,0,numOfCells,false); this.setCellSet(oldCellSet, new CellSet(ccm)); // update the CellSet of this Segment } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java index e16d961..e51b739 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java @@ -29,7 +29,7 @@ import java.util.SortedSet; import java.util.concurrent.ConcurrentSkipListMap; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; /** @@ -48,7 +48,7 @@ public class CellSet implements NavigableSet { // Otherwise, has same attributes as ConcurrentSkipListSet private final NavigableMap delegatee; /// - CellSet(final CellComparator c) { + CellSet(final CellComparatorImpl c) { this.delegatee = new ConcurrentSkipListMap<>(c); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java index 01138df..6a825c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MemoryCompactionPolicy; import org.apache.yetus.audience.InterfaceAudience; @@ -105,7 +105,7 @@ public class CompactingMemStore extends AbstractMemStore { + 2 * ClassSize.ATOMIC_BOOLEAN// inMemoryFlushInProgress and allowCompaction + CompactionPipeline.DEEP_OVERHEAD + MemStoreCompactor.DEEP_OVERHEAD); - public CompactingMemStore(Configuration conf, CellComparator c, + public CompactingMemStore(Configuration conf, CellComparatorImpl c, HStore store, RegionServicesForStores regionServices, MemoryCompactionPolicy compactionPolicy) throws IOException { super(conf, c); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java index 0d2608f..45effee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java @@ -26,7 +26,7 @@ import java.util.SortedSet; import org.apache.commons.logging.Log; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.yetus.audience.InterfaceAudience; @@ -49,7 +49,7 @@ public class CompositeImmutableSegment extends ImmutableSegment { private long keySize = 0; - public CompositeImmutableSegment(CellComparator comparator, List segments) { + public CompositeImmutableSegment(CellComparatorImpl comparator, List segments) { super(comparator); this.segments = segments; this.timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java index 4539ed6..2b182e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java @@ -22,7 +22,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; @@ -54,7 +54,7 @@ public class DateTieredStoreEngine extends StoreEngine storeFileComparator; @@ -63,7 +63,7 @@ class DefaultStoreFileManager implements StoreFileManager { */ private volatile ImmutableList compactedfiles = ImmutableList.of(); - public DefaultStoreFileManager(CellComparator cellComparator, + public DefaultStoreFileManager(CellComparatorImpl cellComparator, Comparator storeFileComparator, Configuration conf, CompactionConfiguration comConf) { this.cellComparator = cellComparator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index 95bbf74..6ed5736 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -35,7 +35,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HConstants; @@ -167,7 +167,7 @@ public class HMobStore extends HStore { */ @Override protected StoreEngine createStoreEngine(HStore store, Configuration conf, - CellComparator cellComparator) throws IOException { + CellComparatorImpl cellComparator) throws 
IOException { MobStoreEngine engine = new MobStoreEngine(); engine.createComponents(conf, store, cellComparator); return engine; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 2d35fb9..21c3909 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -33,7 +33,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -81,6 +80,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; @@ -3713,7 +3713,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } else if (result.size() == 1 && !valueIsNull) { Cell kv = result.get(0); cellTs = kv.getTimestamp(); - int compareResult = CellComparator.compareValue(kv, comparator); + int compareResult = CellComparatorImpl.compareValue(kv, comparator); matches = matches(op, compareResult); } // If matches put the new put or delete the new delete @@ -5792,7 +5792,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi protected final byte[] stopRow; protected final boolean includeStopRow; protected final HRegion region; - protected final CellComparator comparator; + protected final CellComparatorImpl comparator; private final long readPt; private final long maxResultSize; @@ -5818,7 +5818,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi } else { this.filter = null; } - this.comparator = region.getCellComparator(); + this.comparator = (CellComparatorImpl) region.getCellComparator(); /** * By default, calls to next/nextRaw must enforce the batch limit. Thus, construct a default * scanner context that can be used to enforce the batch limit in the event that a @@ -7500,7 +7500,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi // we won't be able to find the existing values if the cells are not specified in order by the // client since cells are in an array list. // TODO: I don't get why we are sorting. St.Ack 20150107 - sort(coordinates, store.getComparator()); + sort(coordinates, (CellComparatorImpl) store.getComparator()); Get get = new Get(mutation.getRow()); if (isolation != null) { get.setIsolationLevel(isolation); @@ -7518,7 +7518,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi /** * @return Sorted list of cells using comparator */ - private static List sort(List cells, final Comparator comparator) { + private static List sort(List cells, final CellComparatorImpl comparator) { Collections.sort(cells, comparator); return cells; } @@ -8056,8 +8056,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi @Override public CellComparator getCellComparator() { - return this.getRegionInfo().isMetaRegion() ? CellComparator.META_COMPARATOR - : CellComparator.COMPARATOR; + return this.getRegionInfo().isMetaRegion() ? 
CellComparatorImpl.META_COMPARATOR + : CellComparatorImpl.COMPARATOR; } public long getMemStoreFlushSize() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 92171d3..c4b87eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -56,6 +56,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompoundConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -192,7 +193,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat protected int bytesPerChecksum; // Comparing KeyValues - protected final CellComparator comparator; + protected final CellComparatorImpl comparator; final StoreEngine storeEngine; @@ -252,7 +253,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat this.dataBlockEncoder = new HFileDataBlockEncoderImpl(family.getDataBlockEncoding()); - this.comparator = region.getCellComparator(); + this.comparator = (CellComparatorImpl)region.getCellComparator(); // used by ScanQueryMatcher long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0); @@ -283,7 +284,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat default: className = DefaultMemStore.class.getName(); this.memstore = ReflectionUtils.instantiateWithCustomCtor(className, new Class[] { - Configuration.class, CellComparator.class }, new Object[] { conf, this.comparator }); + Configuration.class, CellComparatorImpl.class }, new Object[] { conf, this.comparator }); } LOG.info("Memstore class name is " + className); this.offPeakHours = OffPeakHours.getInstance(conf); @@ -343,7 +344,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat * @return StoreEngine to use. 
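
Aside (illustration only, not part of the patch): the HRegion hunk above now picks between the two singletons by table type, and the HStore hunk simply reuses whatever the region returns. A minimal sketch of that selection, using a made-up helper name.

import org.apache.hadoop.hbase.CellComparatorImpl;

public class ComparatorSelectionSketch {
  // Made-up helper mirroring the HRegion#getCellComparator change above:
  // hbase:meta regions order cells with META_COMPARATOR, all other tables with COMPARATOR.
  static CellComparatorImpl comparatorFor(boolean isMetaRegion) {
    return isMetaRegion ? CellComparatorImpl.META_COMPARATOR : CellComparatorImpl.COMPARATOR;
  }

  public static void main(String[] args) {
    System.out.println(comparatorFor(true) == CellComparatorImpl.META_COMPARATOR);  // true
    System.out.println(comparatorFor(false) == CellComparatorImpl.COMPARATOR);      // true
  }
}

Taken together, these hunks route cell ordering through the two shared CellComparatorImpl instances rather than through static helpers on the CellComparator interface.
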
*/ protected StoreEngine createStoreEngine(HStore store, Configuration conf, - CellComparator kvComparator) throws IOException { + CellComparatorImpl kvComparator) throws IOException { return StoreEngine.create(store, conf, comparator); } @@ -777,7 +778,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat + CellUtil.getCellKeyAsString(prevCell) + " current=" + CellUtil.getCellKeyAsString(cell)); } - if (CellComparator.compareFamilies(prevCell, cell) != 0) { + if (CellComparatorImpl.COMPARATOR.compareFamilies(prevCell, cell) != 0) { throw new InvalidHFileException("Previous key had different" + " family compared to current key: path=" + srcPath + " previous=" diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java index 5301922..fadfe3c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java @@ -32,7 +32,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.io.hfile.BlockType; @@ -137,7 +137,7 @@ public class HStoreFile implements StoreFile { private Optional lastKey; - private CellComparator comparator; + private CellComparatorImpl comparator; public CacheConfig getCacheConf() { return cacheConf; @@ -154,7 +154,7 @@ public class HStoreFile implements StoreFile { } @Override - public CellComparator getComparator() { + public CellComparatorImpl getComparator() { return comparator; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java index aacd189..56cb690 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.io.TimeRange; @@ -52,7 +52,7 @@ public abstract class ImmutableSegment extends Segment { /**------------------------------------------------------------------------ * Empty C-tor to be used only for CompositeImmutableSegment */ - protected ImmutableSegment(CellComparator comparator) { + protected ImmutableSegment(CellComparatorImpl comparator) { super(comparator); this.timeRange = null; } @@ -60,7 +60,7 @@ public abstract class ImmutableSegment extends Segment { /**------------------------------------------------------------------------ * C-tor to be used to build the derived classes */ - protected ImmutableSegment(CellSet cs, CellComparator comparator, MemStoreLAB memStoreLAB) { + protected ImmutableSegment(CellSet cs, CellComparatorImpl comparator, MemStoreLAB memStoreLAB) { super(cs, comparator, memStoreLAB); this.timeRange = this.timeRangeTracker == null ? 
null : this.timeRangeTracker.toTimeRange(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index 8073bfd..c0e9c94 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -30,7 +30,7 @@ import java.util.PriorityQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; @@ -77,7 +77,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner * @param comparator */ public KeyValueHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { this(scanners, new KVScannerComparator(comparator)); } @@ -180,12 +180,12 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner } protected static class KVScannerComparator implements Comparator { - protected CellComparator kvComparator; + protected CellComparatorImpl kvComparator; /** * Constructor * @param kvComparator */ - public KVScannerComparator(CellComparator kvComparator) { + public KVScannerComparator(CellComparatorImpl kvComparator) { this.kvComparator = kvComparator; } public int compare(KeyValueScanner left, KeyValueScanner right) { @@ -211,7 +211,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner /** * @return KVComparator */ - public CellComparator getComparator() { + public CellComparatorImpl getComparator() { return this.kvComparator; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java index b3ba998..4ce309d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java @@ -26,7 +26,7 @@ import java.util.List; import java.util.OptionalInt; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; @@ -46,7 +46,7 @@ public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator // C-tor public MemStoreCompactorSegmentsIterator(List segments, - CellComparator comparator, int compactionKVMax, HStore store) throws IOException { + CellComparatorImpl comparator, int compactionKVMax, HStore store) throws IOException { super(compactionKVMax); List scanners = new ArrayList(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java index f43573e..228eaaf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java @@ -20,7 +20,7 @@ package 
org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; @@ -42,7 +42,7 @@ public class MemStoreMergerSegmentsIterator extends MemStoreSegmentsIterator { private boolean closed = false; // C-tor - public MemStoreMergerSegmentsIterator(List segments, CellComparator comparator, + public MemStoreMergerSegmentsIterator(List segments, CellComparatorImpl comparator, int compactionKVMax) throws IOException { super(compactionKVMax); // create the list of scanners to traverse over all the data diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java index 856f40e..294ad14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java @@ -22,7 +22,7 @@ import java.util.Iterator; import java.util.SortedSet; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -40,7 +40,7 @@ public class MutableSegment extends Segment { public final static long DEEP_OVERHEAD = Segment.DEEP_OVERHEAD + ClassSize.CONCURRENT_SKIPLISTMAP; - protected MutableSegment(CellSet cellSet, CellComparator comparator, MemStoreLAB memStoreLAB) { + protected MutableSegment(CellSet cellSet, CellComparatorImpl comparator, MemStoreLAB memStoreLAB) { super(cellSet, comparator, memStoreLAB); incSize(0,DEEP_OVERHEAD); // update the mutable segment metadata } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java index 4d2ab9b..aeba75c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java @@ -25,7 +25,7 @@ import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; /** @@ -44,7 +44,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { * @throws IOException */ public ReversedKeyValueHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { super(scanners, new ReversedKVScannerComparator(comparator)); } @@ -163,7 +163,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap { * Constructor * @param kvComparator */ - public ReversedKVScannerComparator(CellComparator kvComparator) { + public ReversedKVScannerComparator(CellComparatorImpl kvComparator) { super(kvComparator); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java index 0089d3f..c1c7b8e 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java @@ -23,7 +23,7 @@ import java.util.List; import java.util.NavigableSet; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; @@ -60,7 +60,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { @Override protected void resetKVHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { // Combine all seeked scanners with a heap heap = new ReversedKeyValueHeap(scanners, comparator); } @@ -97,7 +97,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner { @Override protected void checkScanOrder(Cell prevKV, Cell kv, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { // Check that the heap gives us KVs in an increasing order for same row and // decreasing order for different rows. assert prevKV == null || comparator == null || comparator.compareRows(kv, prevKV) < 0 diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java index 8e48c69..45028ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeepDeletedCells; @@ -43,7 +43,7 @@ public class ScanInfo { private long ttl; private KeepDeletedCells keepDeletedCells; private long timeToPurgeDeletes; - private CellComparator comparator; + private CellComparatorImpl comparator; private long tableMaxRowSize; private boolean usePread; private long cellsPerTimeoutCheck; @@ -65,7 +65,7 @@ public class ScanInfo { * @param comparator The store's comparator */ public ScanInfo(final Configuration conf, final ColumnFamilyDescriptor family, final long ttl, - final long timeToPurgeDeletes, final CellComparator comparator) { + final long timeToPurgeDeletes, final CellComparatorImpl comparator) { this(conf, family.getName(), family.getMinVersions(), family.getMaxVersions(), ttl, family.getKeepDeletedCells(), family.getBlocksize(), timeToPurgeDeletes, comparator, family.isNewVersionBehavior()); } @@ -84,7 +84,7 @@ public class ScanInfo { */ public ScanInfo(final Configuration conf, final byte[] family, final int minVersions, final int maxVersions, final long ttl, final KeepDeletedCells keepDeletedCells, - final long blockSize, final long timeToPurgeDeletes, final CellComparator comparator, + final long blockSize, final long timeToPurgeDeletes, final CellComparatorImpl comparator, final boolean newVersionBehavior) { this.family = family; this.minVersions = minVersions; @@ -152,7 +152,7 @@ public class ScanInfo { return timeToPurgeDeletes; } - public CellComparator getComparator() { + 
public CellComparatorImpl getComparator() { return comparator; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java index 23b386f..542f221 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java @@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -57,7 +57,7 @@ public abstract class Segment { + ClassSize.CELL_SET + 2 * ClassSize.ATOMIC_LONG + ClassSize.SYNC_TIMERANGE_TRACKER; private AtomicReference cellSet= new AtomicReference<>(); - private final CellComparator comparator; + private final CellComparatorImpl comparator; protected long minSequenceId; private MemStoreLAB memStoreLAB; // Sum of sizes of all Cells added to this Segment. Cell's heapSize is considered. This is not @@ -69,7 +69,7 @@ public abstract class Segment { // Empty constructor to be used when Segment is used as interface, // and there is no need in true Segments state - protected Segment(CellComparator comparator) { + protected Segment(CellComparatorImpl comparator) { this.comparator = comparator; this.dataSize = new AtomicLong(0); this.heapSize = new AtomicLong(0); @@ -77,7 +77,7 @@ public abstract class Segment { } // This constructor is used to create empty Segments. - protected Segment(CellSet cellSet, CellComparator comparator, MemStoreLAB memStoreLAB) { + protected Segment(CellSet cellSet, CellComparatorImpl comparator, MemStoreLAB memStoreLAB) { this.cellSet.set(cellSet); this.comparator = comparator; this.minSequenceId = Long.MAX_VALUE; @@ -271,7 +271,7 @@ public abstract class Segment { * Returns the Cell comparator used by this segment * @return the Cell comparator used by this segment */ - protected CellComparator getComparator() { + protected CellComparatorImpl getComparator() { return comparator; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java index 63d1baa..c8b1f5d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; @@ -43,14 +43,14 @@ public final class SegmentFactory { // create composite immutable segment from a list of segments // for snapshot consisting of multiple segments public CompositeImmutableSegment createCompositeImmutableSegment( - final CellComparator comparator, List segments) { + final CellComparatorImpl comparator, List segments) { return new CompositeImmutableSegment(comparator, segments); } // create new flat immutable segment from compacting old immutable segments // for compaction public ImmutableSegment createImmutableSegmentByCompaction(final Configuration conf, - final CellComparator 
comparator, MemStoreSegmentsIterator iterator, int numOfCells, + final CellComparatorImpl comparator, MemStoreSegmentsIterator iterator, int numOfCells, CompactingMemStore.IndexType idxType) throws IOException { @@ -62,7 +62,7 @@ public final class SegmentFactory { // create empty immutable segment // for initializations - public ImmutableSegment createImmutableSegment(CellComparator comparator) { + public ImmutableSegment createImmutableSegment(CellComparatorImpl comparator) { MutableSegment segment = generateMutableSegment(null, comparator, null); return createImmutableSegment(segment); } @@ -73,7 +73,7 @@ public final class SegmentFactory { } // create mutable segment - public MutableSegment createMutableSegment(final Configuration conf, CellComparator comparator) { + public MutableSegment createMutableSegment(final Configuration conf, CellComparatorImpl comparator) { MemStoreLAB memStoreLAB = MemStoreLAB.newInstance(conf); return generateMutableSegment(conf, comparator, memStoreLAB); } @@ -81,7 +81,7 @@ public final class SegmentFactory { // create new flat immutable segment from merging old immutable segments // for merge public ImmutableSegment createImmutableSegmentByMerge(final Configuration conf, - final CellComparator comparator, MemStoreSegmentsIterator iterator, int numOfCells, + final CellComparatorImpl comparator, MemStoreSegmentsIterator iterator, int numOfCells, List segments, CompactingMemStore.IndexType idxType) throws IOException { @@ -113,7 +113,7 @@ public final class SegmentFactory { //****** private methods to instantiate concrete store segments **********// - private ImmutableSegment createImmutableSegment(final Configuration conf, final CellComparator comparator, + private ImmutableSegment createImmutableSegment(final Configuration conf, final CellComparatorImpl comparator, MemStoreSegmentsIterator iterator, MemStoreLAB memStoreLAB, int numOfCells, MemStoreCompactor.Action action, CompactingMemStore.IndexType idxType) { @@ -132,7 +132,7 @@ public final class SegmentFactory { return res; } - private MutableSegment generateMutableSegment(final Configuration conf, CellComparator comparator, + private MutableSegment generateMutableSegment(final Configuration conf, CellComparatorImpl comparator, MemStoreLAB memStoreLAB) { // TBD use configuration to set type of segment CellSet set = new CellSet(comparator); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index 6f6f31c..1d4a76b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -19,12 +19,11 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.util.Collection; -import java.util.Comparator; import java.util.OptionalDouble; import java.util.OptionalLong; import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -49,7 +48,7 @@ public interface Store { int NO_PRIORITY = Integer.MIN_VALUE; // General Accessors - Comparator getComparator(); + CellComparator getComparator(); Collection getStorefiles(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java index 60b3c3d..c801d32 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionPolicy; import org.apache.hadoop.hbase.regionserver.compactions.Compactor; @@ -97,10 +97,10 @@ public abstract class StoreEngine create( - HStore store, Configuration conf, CellComparator cellComparator) throws IOException { + HStore store, Configuration conf, CellComparatorImpl cellComparator) throws IOException { String className = conf.get(STORE_ENGINE_CLASS_KEY, DEFAULT_STORE_ENGINE_CLASS.getName()); try { StoreEngine se = ReflectionUtils.instantiateWithCustomCtor( diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index c11c8a3..9e318cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; -import java.util.Comparator; import java.util.Optional; import java.util.OptionalLong; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.yetus.audience.InterfaceAudience; @@ -54,7 +54,7 @@ public interface StoreFile { /** * Get the comparator for comparing two cells. */ - Comparator getComparator(); + CellComparator getComparator(); /** * Get max of the MemstoreTS in the KV's in this store file. 
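[Reviewer note, not part of the patch] The Store and StoreFile interfaces above keep exposing getComparator() through the CellComparator interface, while the server-side classes elsewhere in this change narrow their fields and parameters to the concrete CellComparatorImpl. A minimal sketch of what that split looks like to calling code, assuming HBase 2.x jars on the classpath (class and variable names here are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorSplitSketch {
  public static void main(String[] args) {
    Cell a = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    Cell b = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Generic callers keep programming against the interface that Store/StoreFile return...
    CellComparator viaInterface = CellComparatorImpl.COMPARATOR;
    System.out.println(viaInterface.compare(a, b));   // negative: "row1" sorts before "row2"
    // ...while region-server internals in this patch hold the concrete type directly.
    CellComparatorImpl viaImpl = CellComparatorImpl.COMPARATOR;
    System.out.println(viaImpl.compareRows(a, b));    // row-only comparison, also negative
  }
}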
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java index 67b8fbd..7d4aed0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java @@ -34,7 +34,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -132,7 +132,7 @@ public class StoreFileReader { this.shared = false; } - public CellComparator getComparator() { + public CellComparatorImpl getComparator() { return reader.getComparator(); } @@ -371,7 +371,7 @@ public class StoreFileReader { if (bloomFilterType == BloomType.ROW) { keyIsAfterLast = (Bytes.BYTES_RAWCOMPARATOR.compare(key, lastBloomKey) > 0); } else { - keyIsAfterLast = (CellComparator.COMPARATOR.compare(kvKey, lastBloomKeyOnlyKV)) > 0; + keyIsAfterLast = (CellComparatorImpl.COMPARATOR.compare(kvKey, lastBloomKeyOnlyKV)) > 0; } } @@ -384,7 +384,7 @@ public class StoreFileReader { // hbase:meta does not have blooms. So we need not have special interpretation // of the hbase:meta cells. We can safely use Bytes.BYTES_RAWCOMPARATOR for ROW Bloom if (keyIsAfterLast - && (CellComparator.COMPARATOR.compare(rowBloomKey, lastBloomKeyOnlyKV)) > 0) { + && (CellComparatorImpl.COMPARATOR.compare(rowBloomKey, lastBloomKeyOnlyKV)) > 0) { exists = false; } else { exists = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index f52eb39..d1db2c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -31,7 +31,7 @@ import java.util.concurrent.atomic.LongAdder; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -433,7 +433,7 @@ public class StoreFileScanner implements KeyValueScanner { return reader; } - CellComparator getComparator() { + CellComparatorImpl getComparator() { return reader.getComparator(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java index b2da46f..29db1cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java @@ -36,7 +36,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import 
org.apache.hadoop.hbase.KeyValue; @@ -98,7 +98,7 @@ public class StoreFileWriter implements CellSink, ShipperListener { * @throws IOException problem writing to FS */ StoreFileWriter(FileSystem fs, Path path, final Configuration conf, CacheConfig cacheConf, - final CellComparator comparator, BloomType bloomType, long maxKeys, + final CellComparatorImpl comparator, BloomType bloomType, long maxKeys, InetSocketAddress[] favoredNodes, HFileContext fileContext, boolean shouldDropCacheBehind) throws IOException { this(fs, path, conf, cacheConf, comparator, bloomType, maxKeys, favoredNodes, fileContext, @@ -123,7 +123,7 @@ public class StoreFileWriter implements CellSink, ShipperListener { private StoreFileWriter(FileSystem fs, Path path, final Configuration conf, CacheConfig cacheConf, - final CellComparator comparator, BloomType bloomType, long maxKeys, + final CellComparatorImpl comparator, BloomType bloomType, long maxKeys, InetSocketAddress[] favoredNodes, HFileContext fileContext, boolean shouldDropCacheBehind, final TimeRangeTracker trt) throws IOException { @@ -382,7 +382,7 @@ public class StoreFileWriter implements CellSink, ShipperListener { private final CacheConfig cacheConf; private final FileSystem fs; - private CellComparator comparator = CellComparator.COMPARATOR; + private CellComparatorImpl comparator = CellComparatorImpl.COMPARATOR; private BloomType bloomType = BloomType.NONE; private long maxKeyCount = 0; private Path dir; @@ -452,7 +452,7 @@ public class StoreFileWriter implements CellSink, ShipperListener { return this; } - public Builder withComparator(CellComparator comparator) { + public Builder withComparator(CellComparatorImpl comparator) { Preconditions.checkNotNull(comparator); this.comparator = comparator; return this; @@ -519,7 +519,7 @@ public class StoreFileWriter implements CellSink, ShipperListener { } if (comparator == null) { - comparator = CellComparator.COMPARATOR; + comparator = CellComparatorImpl.COMPARATOR; } return new StoreFileWriter(fs, filePath, conf, cacheConf, comparator, bloomType, maxKeyCount, favoredNodes, fileContext, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 588211c..d027235 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -31,7 +31,7 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; @@ -254,7 +254,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner this.storeOffset = scan.getRowOffsetPerColumnFamily(); addCurrentScanners(scanners); // Combine all seeked scanners with a heap - resetKVHeap(scanners, store.getComparator()); + resetKVHeap(scanners, (CellComparatorImpl)store.getComparator()); } catch (IOException e) { // remove us from the HStore#changedReaderObservers here or we'll have no chance to // and might cause memory leak @@ -318,7 +318,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner seekScanners(scanners, matcher.getStartKey(), false, parallelSeekEnabled); 
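[Reviewer note, not part of the patch] The StoreScanner constructors in this hunk now cast store.getComparator() to CellComparatorImpl before handing it to resetKVHeap, which assumes every store's comparator is the concrete implementation. The ordering contract the resulting KeyValueHeap relies on can be sketched with a plain PriorityQueue (illustrative class name, assumes HBase 2.x jars):

import java.util.PriorityQueue;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class HeapOrderSketch {
  public static void main(String[] args) {
    // KeyValueHeap's KVScannerComparator wraps the same comparator to order whole scanners;
    // a PriorityQueue over single cells shows the ordering it depends on.
    PriorityQueue<Cell> heap = new PriorityQueue<>(CellComparatorImpl.COMPARATOR);
    heap.add(new KeyValue(Bytes.toBytes("b"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));
    heap.add(new KeyValue(Bytes.toBytes("a"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));
    System.out.println(Bytes.toString(CellUtil.cloneRow(heap.poll())));   // prints "a"
  }
}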
addCurrentScanners(scanners); // Combine all seeked scanners with a heap - resetKVHeap(scanners, store.getComparator()); + resetKVHeap(scanners, (CellComparatorImpl)store.getComparator()); } private void seekAllScanner(ScanInfo scanInfo, List scanners) @@ -407,7 +407,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } protected void resetKVHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { // Combine all seeked scanners with a heap heap = new KeyValueHeap(scanners, comparator); } @@ -554,7 +554,8 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner } // Only do a sanity-check if store and comparator are available. - CellComparator comparator = store.map(s -> s.getComparator()).orElse(null); + CellComparatorImpl comparator = + store.map(s -> (CellComparatorImpl) s.getComparator()).orElse(null); int count = 0; long totalBytesRead = 0; @@ -901,7 +902,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner // add the newly created scanners on the flushed files and the current active memstore scanner addCurrentScanners(scanners); // Combine all seeked scanners with a heap - resetKVHeap(this.currentScanners, store.getComparator()); + resetKVHeap(this.currentScanners, (CellComparatorImpl) store.getComparator()); resetQueryMatcher(lastTop); if (heap.peek() == null || store.getComparator().compareRows(lastTop, this.heap.peek()) != 0) { LOG.info("Storescanner.peek() is changed where before = " + lastTop.toString() + @@ -936,7 +937,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner * @throws IOException */ protected void checkScanOrder(Cell prevKV, Cell kv, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { // Check that the heap gives us KVs in an increasing order. assert prevKV == null || comparator == null || comparator.compare(prevKV, kv) <= 0 : "Key " + prevKV @@ -1008,7 +1009,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner newCurrentScanners = new ArrayList<>(fileScanners.size() + memstoreScanners.size()); newCurrentScanners.addAll(fileScanners); newCurrentScanners.addAll(memstoreScanners); - newHeap = new KeyValueHeap(newCurrentScanners, store.getComparator()); + newHeap = new KeyValueHeap(newCurrentScanners, (CellComparatorImpl)store.getComparator()); } catch (Exception e) { LOG.warn("failed to switch to stream read", e); if (fileScanners != null) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java index 2ada5a9..eb641b9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java @@ -27,7 +27,7 @@ import java.util.OptionalLong; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -99,7 +99,7 @@ public class StoreUtils { * @param comparator Comparator used to compare KVs. * @return The split point row, or null if splitting is not possible, or reader is null. 
*/ - static Optional getFileSplitPoint(HStoreFile file, CellComparator comparator) + static Optional getFileSplitPoint(HStoreFile file, CellComparatorImpl comparator) throws IOException { StoreFileReader reader = file.getReader(); if (reader == null) { @@ -131,7 +131,7 @@ public class StoreUtils { * Gets the mid point of the largest file passed in as split point. */ static Optional getSplitPoint(Collection storefiles, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { Optional largestFile = StoreUtils.getLargestFile(storefiles); return largestFile.isPresent() ? StoreUtils.getFileSplitPoint(largestFile.get(), comparator) : Optional.empty(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java index 0456980..4ee7a06 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java @@ -27,7 +27,7 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; @@ -41,14 +41,14 @@ public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter { private static final Log LOG = LogFactory.getLog(StripeMultiFileWriter.class); - protected final CellComparator comparator; + protected final CellComparatorImpl comparator; protected List existingWriters; protected List boundaries; /** Whether to write stripe metadata */ private boolean doWriteStripeMetadata = true; - public StripeMultiFileWriter(CellComparator comparator) { + public StripeMultiFileWriter(CellComparatorImpl comparator) { this.comparator = comparator; } @@ -139,7 +139,7 @@ public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter { * (because all files are included in compaction). majorRangeFrom is the left boundary. * @param majorRangeTo The right boundary of majorRange (see majorRangeFrom). */ - public BoundaryMultiWriter(CellComparator comparator, List targetBoundaries, + public BoundaryMultiWriter(CellComparatorImpl comparator, List targetBoundaries, byte[] majorRangeFrom, byte[] majorRangeTo) throws IOException { super(comparator); this.boundaries = targetBoundaries; @@ -275,7 +275,7 @@ public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter { * @param left The left boundary of the first writer. * @param right The right boundary of the last writer. 
*/ - public SizeMultiWriter(CellComparator comparator, int targetCount, long targetKvs, byte[] left, + public SizeMultiWriter(CellComparatorImpl comparator, int targetCount, long targetKvs, byte[] left, byte[] right) { super(comparator); this.targetCount = targetCount; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java index 8c26363..0d5f44f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java @@ -25,7 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl; import org.apache.yetus.audience.InterfaceAudience; @@ -58,7 +58,7 @@ public class StripeStoreEngine extends StoreEngine targetBoundaries; /** @param targetBoundaries New files should be written with these boundaries. */ - public BoundaryStripeFlushRequest(CellComparator comparator, List targetBoundaries) { + public BoundaryStripeFlushRequest(CellComparatorImpl comparator, List targetBoundaries) { super(comparator); this.targetBoundaries = targetBoundaries; } @@ -166,7 +166,7 @@ public class StripeStoreFlusher extends StoreFlusher { * @param targetKvs The KV count of each segment. If targetKvs*targetCount is less than * total number of kvs, all the overflow data goes into the last stripe. */ - public SizeStripeFlushRequest(CellComparator comparator, int targetCount, long targetKvs) { + public SizeStripeFlushRequest(CellComparatorImpl comparator, int targetCount, long targetKvs) { super(comparator); this.targetCount = targetCount; this.targetKvs = targetKvs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java index b6de678..11196d6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.regionserver.StoreUtils; @@ -85,7 +85,7 @@ public class StripeCompactionPolicy extends CompactionPolicy { request, OPEN_KEY, OPEN_KEY, targetKvsAndCount.getSecond(), targetKvsAndCount.getFirst()); } - public StripeStoreFlusher.StripeFlushRequest selectFlush(CellComparator comparator, + public StripeStoreFlusher.StripeFlushRequest selectFlush(CellComparatorImpl comparator, StripeInformationProvider si, int kvCount) { if (this.config.isUsingL0Flush()) { // L0 is used, return dumb request. 
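[Reviewer note, not part of the patch] The stripe writers and flush requests above take the comparator only to decide which stripe a cell's row falls into. A simplified illustration of that boundary check, written against CellComparatorImpl (class and method names are illustrative, assumes HBase 2.x jars):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class StripeBoundarySketch {
  // True while the cell's row is still below the given (exclusive) right boundary.
  static boolean belowBoundary(Cell cell, byte[] rightBoundary) {
    return CellComparatorImpl.COMPARATOR.compareRows(cell, rightBoundary, 0, rightBoundary.length) < 0;
  }

  public static void main(String[] args) {
    Cell cell = new KeyValue(Bytes.toBytes("m"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    System.out.println(belowBoundary(cell, Bytes.toBytes("n")));  // true:  row "m" sorts before "n"
    System.out.println(belowBoundary(cell, Bytes.toBytes("a")));  // false: row "m" sorts after "a"
  }
}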
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java index f4836a8..f59c2b0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.ScanType; @@ -94,7 +95,8 @@ public class StripeCompactor extends AbstractMultiOutputCompactor= 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java index 8d848e5..dcffc9c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java @@ -24,7 +24,7 @@ import java.util.TreeSet; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -108,7 +108,7 @@ public class ScanDeleteTracker implements DeleteTracker { } if (deleteCell != null) { - int ret = -(CellComparator.compareQualifiers(cell, deleteCell)); + int ret = -(CellComparatorImpl.COMPARATOR.compareQualifiers(cell, deleteCell)); if (ret == 0) { if (deleteType == KeyValue.Type.DeleteColumn.getCode()) { return DeleteResult.COLUMN_DELETED; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java index 25346f1..2ce8edd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java @@ -22,7 +22,7 @@ import java.util.Iterator; import java.util.NavigableSet; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -113,7 +113,7 @@ public abstract class ScanQueryMatcher implements ShipperListener { } /** Row comparator for the region this query is for */ - protected final CellComparator rowComparator; + protected final CellComparatorImpl rowComparator; /** Key to seek to in memstore and StoreFiles */ protected final Cell startKey; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java index 0dab227..02267bf 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; @@ -84,7 +84,7 @@ public class ScanWildcardColumnTracker implements ColumnTracker { // do not count a delete marker as another version return checkVersion(type, timestamp); } - int cmp = CellComparator.compareQualifiers(cell, this.columnCell); + int cmp = CellComparatorImpl.COMPARATOR.compareQualifiers(cell, this.columnCell); if (cmp == 0) { if (ignoreCount) { return ScanQueryMatcher.MatchCode.INCLUDE; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java index f9374d8..0b798a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java @@ -26,7 +26,7 @@ import java.util.Set; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; @@ -81,7 +81,7 @@ class FSWALEntry extends Entry { } else { return cells.stream() .filter(v -> !CellUtil.matchingFamily(v, WALEdit.METAFAMILY)) - .collect(toCollection(() -> new TreeSet<>(CellComparator::compareFamilies))) + .collect(toCollection(() -> new TreeSet<>(CellComparatorImpl.COMPARATOR::compareFamilies))) .stream() .map(CellUtil::cloneFamily) .collect(toCollection(() -> new TreeSet<>(Bytes.BYTES_COMPARATOR))); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java index 318ae57..84afa5c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java @@ -28,7 +28,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.querymatcher.ScanDeleteTracker; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; @@ -246,7 +246,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker { } } if (deleteCell != null) { - int ret = CellComparator.compareQualifiers(cell, deleteCell); + int ret = CellComparatorImpl.COMPARATOR.compareQualifiers(cell, deleteCell); if (ret == 0) { if (deleteType == KeyValue.Type.DeleteColumn.getCode()) { if (visibilityTagsDeleteColumns != null) { diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomContext.java index 31394e8..94d5ee7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomContext.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomContext.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.HFile; @@ -32,9 +32,9 @@ import org.apache.hadoop.hbase.io.hfile.HFile; public abstract class BloomContext { protected BloomFilterWriter bloomFilterWriter; - protected CellComparator comparator; + protected CellComparatorImpl comparator; - public BloomContext(BloomFilterWriter bloomFilterWriter, CellComparator comparator) { + public BloomContext(BloomFilterWriter bloomFilterWriter, CellComparatorImpl comparator) { this.bloomFilterWriter = bloomFilterWriter; this.comparator = comparator; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java index e8ee3ff..b4851bf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java @@ -23,7 +23,7 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CompoundBloomFilter; import org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterBase; @@ -194,7 +194,7 @@ public final class BloomFilterFactory { // In case of compound Bloom filters we ignore the maxKeys hint. CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf), err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(), - bloomType == BloomType.ROWCOL ? CellComparator.COMPARATOR : null, bloomType); + bloomType == BloomType.ROWCOL ? 
CellComparatorImpl.COMPARATOR : null, bloomType); writer.addInlineBlockWriter(bloomWriter); return bloomWriter; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java index 3a4eb81..5016f4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java @@ -26,7 +26,7 @@ import java.util.SortedSet; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner; /** @@ -35,34 +35,34 @@ import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner; @InterfaceAudience.Private public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner { final private Iterable data; - final CellComparator comparator; + final CellComparatorImpl comparator; private Iterator iter; private Cell current; public CollectionBackedScanner(SortedSet set) { - this(set, CellComparator.COMPARATOR); + this(set, CellComparatorImpl.COMPARATOR); } public CollectionBackedScanner(SortedSet set, - CellComparator comparator) { + CellComparatorImpl comparator) { this.comparator = comparator; data = set; init(); } public CollectionBackedScanner(List list) { - this(list, CellComparator.COMPARATOR); + this(list, CellComparatorImpl.COMPARATOR); } public CollectionBackedScanner(List list, - CellComparator comparator) { + CellComparatorImpl comparator) { Collections.sort(list, comparator); this.comparator = comparator; data = list; init(); } - public CollectionBackedScanner(CellComparator comparator, + public CollectionBackedScanner(CellComparatorImpl comparator, Cell... array) { this.comparator = comparator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java index a91c7fa..2f715df 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java @@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -140,7 +140,7 @@ public class CompressionTest { scanner.seekTo(); // position to the start of file // Scanner does not do Cells yet. Do below for now till fixed. 
cc = scanner.getCell(); - if (CellComparator.COMPARATOR.compareRows(c, cc) != 0) { + if (CellComparatorImpl.COMPARATOR.compareRows(c, cc) != 0) { throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString()); } } finally { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java index 369aed7..f8ca6ee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java @@ -22,7 +22,7 @@ import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.yetus.audience.InterfaceAudience; @@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RowBloomContext extends BloomContext { - public RowBloomContext(BloomFilterWriter bloomFilterWriter, CellComparator comparator) { + public RowBloomContext(BloomFilterWriter bloomFilterWriter, CellComparatorImpl comparator) { super(bloomFilterWriter, comparator); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowColBloomContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowColBloomContext.java index 90cbcb0..ddffd1c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowColBloomContext.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowColBloomContext.java @@ -22,7 +22,7 @@ import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.yetus.audience.InterfaceAudience; @@ -34,7 +34,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RowColBloomContext extends BloomContext { - public RowColBloomContext(BloomFilterWriter generalBloomFilterWriter, CellComparator comparator) { + public RowColBloomContext(BloomFilterWriter generalBloomFilterWriter, CellComparatorImpl comparator) { super(generalBloomFilterWriter, comparator); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 59ad6de..f961aab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -3538,7 +3538,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { int i; for (i = 0; i < minLen - && CellComparator.COMPARATOR.compare(expected.get(i), actual.get(i)) == 0; + && CellComparatorImpl.COMPARATOR.compare(expected.get(i), actual.get(i)) == 0; ++i) {} if (additionalMsg == null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java index b50a2b8..5992776 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java @@ -366,7 +366,7 @@ public class HFilePerformanceEvaluation { writer = HFile.getWriterFactoryNoCache(conf) .withPath(fs, mf) .withFileContext(hFileContext) - .withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .create(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MetaMockingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MetaMockingUtil.java index a89237e..66c6f95 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MetaMockingUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MetaMockingUtil.java @@ -100,7 +100,7 @@ public class MetaMockingUtil { } //important: sort the kvs so that binary search work - Collections.sort(kvs, CellComparator.META_COMPARATOR); + Collections.sort(kvs, CellComparatorImpl.META_COMPARATOR); return Result.create(kvs); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java index 5dee0e3..ac0a4e6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java @@ -32,7 +32,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -292,7 +292,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks { public boolean next(List outResult, ScannerContext scannerContext) throws IOException { boolean next = super.next(outResult, scannerContext); for (Cell cell : outResult) { - if(CellComparator.COMPARATOR.compareRows(cell, ROW2, 0, ROW2.length) == 0) { + if(CellComparatorImpl.COMPARATOR.compareRows(cell, ROW2, 0, ROW2.length) == 0) { try { // hold the compaction // set doscan to true diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java index 0e93ee0..8ca562b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java @@ -31,7 +31,7 @@ import junit.framework.TestCase; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; @@ -70,7 +70,7 @@ public class TestResult extends TestCase { */ public void testResultAsCellScanner() throws IOException { Cell [] cells = genKVs(row, family, value, 1, 10); - Arrays.sort(cells, CellComparator.COMPARATOR); + Arrays.sort(cells, CellComparatorImpl.COMPARATOR); Result r = Result.create(cells); assertSame(r, cells); // Assert I run over same result multiple times. 
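[Reviewer note, not part of the patch] The TestResult changes below keep the existing pattern of sorting the generated KeyValues before wrapping them in a Result; only the comparator constant moves. A minimal sketch of why that sort matters, since Result's lookup methods assume the backing array is already in comparator order (illustrative class name, assumes HBase 2.x jars):

import java.util.Arrays;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class SortedResultSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row");
    byte[] fam = Bytes.toBytes("f");
    Cell[] cells = {
      new KeyValue(row, fam, Bytes.toBytes("q2"), Bytes.toBytes("v2")),
      new KeyValue(row, fam, Bytes.toBytes("q1"), Bytes.toBytes("v1")),
    };
    Arrays.sort(cells, CellComparatorImpl.COMPARATOR);   // "q1" now precedes "q2"
    Result r = Result.create(cells);
    System.out.println(Bytes.toString(r.getValue(fam, Bytes.toBytes("q1"))));  // prints "v1"
  }
}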
@@ -92,7 +92,7 @@ public class TestResult extends TestCase { public void testBasicGetColumn() throws Exception { KeyValue [] kvs = genKVs(row, family, value, 1, 100); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); Result r = Result.create(kvs); @@ -114,7 +114,7 @@ public class TestResult extends TestCase { System.arraycopy(kvs1, 0, kvs, 0, kvs1.length); System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); Result r = Result.create(kvs); for (int i = 0; i < 100; ++i) { @@ -131,7 +131,7 @@ public class TestResult extends TestCase { public void testBasicGetValue() throws Exception { KeyValue [] kvs = genKVs(row, family, value, 1, 100); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); Result r = Result.create(kvs); @@ -151,7 +151,7 @@ public class TestResult extends TestCase { System.arraycopy(kvs1, 0, kvs, 0, kvs1.length); System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); Result r = Result.create(kvs); for (int i = 0; i < 100; ++i) { @@ -165,7 +165,7 @@ public class TestResult extends TestCase { public void testBasicLoadValue() throws Exception { KeyValue [] kvs = genKVs(row, family, value, 1, 100); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); Result r = Result.create(kvs); ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024); @@ -190,7 +190,7 @@ public class TestResult extends TestCase { System.arraycopy(kvs1, 0, kvs, 0, kvs1.length); System.arraycopy(kvs2, 0, kvs, kvs1.length, kvs2.length); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024); @@ -273,7 +273,7 @@ public class TestResult extends TestCase { KeyValue [] kvs = genKVs(Bytes.toBytes(rowSB.toString()), family, Bytes.toBytes(valueSB.toString()), 1, n); - Arrays.sort(kvs, CellComparator.COMPARATOR); + Arrays.sort(kvs, CellComparatorImpl.COMPARATOR); ByteBuffer loadValueBuffer = ByteBuffer.allocate(1024); Result r = Result.create(kvs); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index 16420d1..74503bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -31,7 +31,7 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -154,7 +154,7 @@ public class TestDependentColumnFilter { for (boolean done = true; done; i++) { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; cells += results.size(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index 2624d95..6f392a8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -31,7 +31,7 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -1667,7 +1667,7 @@ public class TestFilter { for (boolean done = true; done; i++) { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; assertTrue("Scanned too many rows! Only expected " + expectedRows + @@ -1689,7 +1689,7 @@ public class TestFilter { for (boolean done = true; done; i++) { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); LOG.info("counter=" + i + ", " + results); if(results.isEmpty()) break; assertTrue("Scanned too many rows! Only expected " + expectedRows + @@ -1711,7 +1711,7 @@ public class TestFilter { for (boolean done = true; done; row++) { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); if(results.isEmpty()) break; assertTrue("Scanned too many keys! Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) + @@ -1742,7 +1742,7 @@ public class TestFilter { for (boolean more = true; more; row++) { more = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); if(results.isEmpty()) break; assertTrue("Scanned too many keys! 
Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) + diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index a571712..c471bb4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -25,7 +25,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -490,7 +490,7 @@ public class TestFilterList { // Should take the min if given two hints FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } )); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), minKeyValue)); // Should have no hint if any filter has no hint @@ -505,7 +505,7 @@ public class TestFilterList { // Should give max hint if its the only one filterList = new FilterList(Operator.MUST_PASS_ONE, Arrays.asList(new Filter [] { filterMaxHint, filterMaxHint } )); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue)); // MUST PASS ALL @@ -514,13 +514,13 @@ public class TestFilterList { filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } )); filterList.filterKeyValue(null); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue)); filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(new Filter [] { filterMaxHint, filterMinHint } )); filterList.filterKeyValue(null); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue)); // Should have first hint even if a filter has no hint @@ -528,17 +528,17 @@ public class TestFilterList { Arrays.asList( new Filter [] { filterNoHint, filterMinHint, filterMaxHint } )); filterList.filterKeyValue(null); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue)); filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(new Filter [] { filterNoHint, filterMaxHint } )); filterList.filterKeyValue(null); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), maxKeyValue)); filterList = new FilterList(Operator.MUST_PASS_ALL, Arrays.asList(new Filter [] { filterNoHint, filterMinHint } )); filterList.filterKeyValue(null); - assertEquals(0, CellComparator.COMPARATOR.compare(filterList.getNextCellHint(null), + assertEquals(0, CellComparatorImpl.COMPARATOR.compare(filterList.getNextCellHint(null), minKeyValue)); } diff 
--git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java index 6e9fef1..2d223dc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.filter; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; @@ -69,8 +69,8 @@ public class TestSingleColumnValueExcludeFilter { filter.filterRowCells(kvs); assertEquals("resultSize", kvs.size(), 2); - assertTrue("leftKV1", CellComparator.COMPARATOR.compare(kvs.get(0), kv) == 0); - assertTrue("leftKV2", CellComparator.COMPARATOR.compare(kvs.get(1), kv) == 0); + assertTrue("leftKV1", CellComparatorImpl.COMPARATOR.compare(kvs.get(0), kv) == 0); + assertTrue("leftKV2", CellComparatorImpl.COMPARATOR.compare(kvs.get(1), kv) == 0); assertFalse("allRemainingWhenMatch", filter.filterAllRemaining()); // A 'mismatch' situation diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java index 37d6b8f..57aa877 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java @@ -32,7 +32,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; @@ -164,7 +164,7 @@ public class TestHalfStoreFileReader { // Ugly code to get the item before the midkey KeyValue beforeMidKey = null; for (KeyValue item : items) { - if (CellComparator.COMPARATOR.compare(item, midKV) >= 0) { + if (CellComparatorImpl.COMPARATOR.compare(item, midKV) >= 0) { break; } beforeMidKey = item; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java index 45d1a36..4805e6d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java @@ -35,7 +35,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -209,7 +209,7 @@ public class TestDataBlockEncoders { .withIncludesTags(includesTags) .withCompression(Compression.Algorithm.NONE) .build(); - DataBlockEncoder.EncodedSeeker seeker = 
encoder.createSeeker(CellComparator.COMPARATOR, + DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer)); encodedSeekers.add(seeker); @@ -285,14 +285,14 @@ public class TestDataBlockEncoders { .withIncludesTags(includesTags) .withCompression(Compression.Algorithm.NONE) .build(); - DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, + DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer)); int i = 0; do { KeyValue expectedKeyValue = sampleKv.get(i); Cell cell = seeker.getCell(); - if (CellComparator.COMPARATOR.compareKeyIgnoresMvcc(expectedKeyValue, cell) != 0) { + if (CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(expectedKeyValue, cell) != 0) { int commonPrefix = CellUtil .findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true); fail(String.format("next() produces wrong results " @@ -326,7 +326,7 @@ public class TestDataBlockEncoders { getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData); Cell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer)); KeyValue firstKv = sampleKv.get(0); - if (0 != CellComparator.COMPARATOR.compareKeyIgnoresMvcc(key, firstKv)) { + if (0 != CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(key, firstKv)) { int commonPrefix = CellUtil.findCommonPrefixInFlatKey(key, firstKv, false, true); fail(String.format("Bug in '%s' commonPrefix %d", encoder.toString(), commonPrefix)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java index decd39d..6732927 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java @@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentSkipListSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -73,7 +73,7 @@ public class TestPrefixTreeEncoding { private static final int NUM_COLS_PER_ROW = 20; private int numBatchesWritten = 0; - private ConcurrentSkipListSet kvset = new ConcurrentSkipListSet<>(CellComparator.COMPARATOR); + private ConcurrentSkipListSet kvset = new ConcurrentSkipListSet<>(CellComparatorImpl.COMPARATOR); private static boolean formatRowNum = false; @@ -113,7 +113,7 @@ public class TestPrefixTreeEncoding { DataOutputStream userDataStream = new DataOutputStream(baosInMemory); generateFixedTestData(kvset, batchId, false, includesTag, encoder, blkEncodingCtx, userDataStream); - EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, + EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); byte[] onDiskBytes = baosInMemory.toByteArray(); ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, @@ -156,7 +156,7 @@ public class TestPrefixTreeEncoding { 
DataBlockEncoding.PREFIX_TREE, new byte[0], meta); generateRandomTestData(kvset, numBatchesWritten++, includesTag, encoder, blkEncodingCtx, userDataStream); - EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, + EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); byte[] onDiskBytes = baosInMemory.toByteArray(); ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, @@ -166,7 +166,7 @@ public class TestPrefixTreeEncoding { do { Cell currentKV = seeker.getCell(); System.out.println(currentKV); - if (previousKV != null && CellComparator.COMPARATOR.compare(currentKV, previousKV) < 0) { + if (previousKV != null && CellComparatorImpl.COMPARATOR.compare(currentKV, previousKV) < 0) { dumpInputKVSet(); fail("Current kv " + currentKV + " is smaller than previous keyvalue " + previousKV); } @@ -194,7 +194,7 @@ public class TestPrefixTreeEncoding { HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext( DataBlockEncoding.PREFIX_TREE, new byte[0], meta); generateRandomTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream); - EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, + EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); byte[] onDiskBytes = baosInMemory.toByteArray(); ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, @@ -217,7 +217,7 @@ public class TestPrefixTreeEncoding { ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream(); DataOutputStream userDataStream = new DataOutputStream(baosInMemory); generateFixedTestData(kvset, batchId, includesTag, encoder, blkEncodingCtx, userDataStream); - EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR, + EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); byte[] onDiskBytes = baosInMemory.toByteArray(); ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE, @@ -244,7 +244,7 @@ public class TestPrefixTreeEncoding { fail("Get error result after seeking " + firstOnRow); } if (hasMoreOfEncodeScanner) { - if (CellComparator.COMPARATOR.compare(encodeSeeker.getCell(), + if (CellComparatorImpl.COMPARATOR.compare(encodeSeeker.getCell(), collectionScanner.peek()) != 0) { dumpInputKVSet(); fail("Expected " + collectionScanner.peek() + " actual " diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java index 4a7f2eb..dfafdb8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekToBlockWithEncoders.java @@ -25,7 +25,7 @@ import java.util.Collection; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -284,7 +284,7 @@ public class TestSeekToBlockWithEncoders { HFILEBLOCK_DUMMY_HEADER, meta); ByteBuffer encodedBuffer = TestDataBlockEncoders.encodeKeyValues(encoding, kvs, encodingContext, this.useOffheapData); - DataBlockEncoder.EncodedSeeker seeker = 
encoder.createSeeker(CellComparator.COMPARATOR, + DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(CellComparatorImpl.COMPARATOR, encoder.newDataBlockDecodingContext(meta)); seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer)); encodedSeekers.add(seeker); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 950beec..58c9b65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -40,7 +40,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -372,7 +372,7 @@ public class TestCacheOnWrite { .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding()) .withIncludesTags(useTags).build(); StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs) - .withOutputDir(storeFileParentDir).withComparator(CellComparator.COMPARATOR) + .withOutputDir(storeFileParentDir).withComparator(CellComparatorImpl.COMPARATOR) .withFileContext(meta) .withBloomType(BLOOM_TYPE).withMaxKeyCount(NUM_KV).build(); byte[] cf = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java index 6145eca..fcc09d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java @@ -97,7 +97,7 @@ public class TestFixedFileTrailer { t.setLastDataBlockOffset(291); t.setNumDataIndexLevels(3); - t.setComparatorClass(CellComparator.COMPARATOR.getClass()); + t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass()); t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic. t.setUncompressedDataIndexSize(827398717L); // Something random. @@ -178,7 +178,7 @@ public class TestFixedFileTrailer { t.setEntryCount(((long) Integer.MAX_VALUE) + 1); t.setLastDataBlockOffset(291); t.setNumDataIndexLevels(3); - t.setComparatorClass(CellComparator.COMPARATOR.getClass()); + t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass()); t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic. t.setUncompressedDataIndexSize(827398717L); // Something random. 
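Illustrative sketch, not part of the patch: every test hunk in this change swaps the old CellComparator.COMPARATOR singleton for CellComparatorImpl.COMPARATOR at the point where cells are sorted or compared. A minimal, self-contained version of that call pattern follows; CellComparatorImpl, KeyValue and Bytes are taken from the diff itself, while the class name ComparatorSortSketch and the sample row/family/qualifier values are invented for illustration.

import java.util.Arrays;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorSortSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] fam = Bytes.toBytes("fam");
    byte[] val = Bytes.toBytes("val");
    // Two cells on the same row with out-of-order qualifiers.
    KeyValue[] kvs = new KeyValue[] {
        new KeyValue(row, fam, Bytes.toBytes("q2"), val),
        new KeyValue(row, fam, Bytes.toBytes("q1"), val)
    };
    // After this change the tests sort with the CellComparatorImpl singleton.
    Arrays.sort(kvs, CellComparatorImpl.COMPARATOR);
    // kvs[0] now carries qualifier q1, kvs[1] carries q2.
  }
}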
t.setLoadOnOpenOffset(128); @@ -276,7 +276,7 @@ public class TestFixedFileTrailer { assertEquals(expected.getFirstDataBlockOffset(), loaded.getFirstDataBlockOffset()); assertTrue( - expected.createComparator() instanceof CellComparator); + expected.createComparator() instanceof CellComparatorImpl); assertEquals(expected.getUncompressedDataIndexSize(), loaded.getUncompressedDataIndexSize()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index 13589fb..e3f2ad8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -41,7 +41,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -117,7 +117,7 @@ public class TestHFile { HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build(); StoreFileWriter sfw = new StoreFileWriter.Builder(conf, fs).withOutputDir(storeFileParentDir) - .withComparator(CellComparator.COMPARATOR).withFileContext(meta).build(); + .withComparator(CellComparatorImpl.COMPARATOR).withFileContext(meta).build(); final int rowLen = 32; Random RNG = new Random(); @@ -319,7 +319,7 @@ public class TestHFile { Writer writer = HFile.getWriterFactory(conf, cacheConf) .withOutputStream(fout) .withFileContext(meta) - .withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .create(); LOG.info(writer); writeRecords(writer, useTags); @@ -486,72 +486,72 @@ public class TestHFile { 9, KeyValue.Type.Maximum.getCode(), HConstants.EMPTY_BYTE_ARRAY); - Cell mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) <= 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) == 0); + Cell mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) <= 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) == 0); } @Test public void testGetShortMidpoint() { Cell left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); Cell right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); - Cell mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) <= 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); + Cell mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) <= 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("b"), Bytes.toBytes("a"), Bytes.toBytes("a")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - 
assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); left = CellUtil.createCell(Bytes.toBytes("g"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("i"), Bytes.toBytes("a"), Bytes.toBytes("a")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("bbbbbbb"), Bytes.toBytes("a"), Bytes.toBytes("a")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); assertEquals(1, mid.getRowLength()); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("a")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("aaaaaaaa"), Bytes.toBytes("b")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); assertEquals(2, mid.getFamilyLength()); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("aaaaaaaaa")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); + mid = 
HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) < 0); assertEquals(2, mid.getQualifierLength()); left = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("a"), Bytes.toBytes("a"), Bytes.toBytes("b")); - mid = HFileWriterImpl.getMidpoint(CellComparator.COMPARATOR, left, right); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.COMPARATOR, left, right); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(mid, right) <= 0); assertEquals(1, mid.getQualifierLength()); // Assert that if meta comparator, it returns the right cell -- i.e. no // optimization done. left = CellUtil.createCell(Bytes.toBytes("g"), Bytes.toBytes("a"), Bytes.toBytes("a")); right = CellUtil.createCell(Bytes.toBytes("i"), Bytes.toBytes("a"), Bytes.toBytes("a")); - mid = HFileWriterImpl.getMidpoint(CellComparator.META_COMPARATOR, left, right); - assertTrue(CellComparator.META_COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); - assertTrue(CellComparator.META_COMPARATOR.compareKeyIgnoresMvcc(mid, right) == 0); + mid = HFileWriterImpl.getMidpoint(CellComparatorImpl.META_COMPARATOR, left, right); + assertTrue(CellComparatorImpl.META_COMPARATOR.compareKeyIgnoresMvcc(left, mid) < 0); + assertTrue(CellComparatorImpl.META_COMPARATOR.compareKeyIgnoresMvcc(mid, right) == 0); /** * See HBASE-7845 @@ -562,7 +562,7 @@ public class TestHFile { byte[] family = Bytes.toBytes("family"); byte[] qualA = Bytes.toBytes("qfA"); byte[] qualB = Bytes.toBytes("qfB"); - final CellComparator keyComparator = CellComparator.COMPARATOR; + final CellComparatorImpl keyComparator = CellComparatorImpl.COMPARATOR; // verify that faked shorter rowkey could be generated long ts = 5; KeyValue kv1 = new KeyValue(Bytes.toBytes("the quick brown fox"), family, qualA, ts, Type.Put); @@ -601,7 +601,7 @@ public class TestHFile { assertTrue(newKey.getTypeByte() == Type.Maximum.getCode()); // verify metaKeyComparator's getShortMidpointKey output - final CellComparator metaKeyComparator = CellComparator.META_COMPARATOR; + final CellComparatorImpl metaKeyComparator = CellComparatorImpl.META_COMPARATOR; kv1 = new KeyValue(Bytes.toBytes("ilovehbase123"), family, qualA, 5, Type.Put); kv2 = new KeyValue(Bytes.toBytes("ilovehbase234"), family, qualA, 0, Type.Put); newKey = HFileWriterImpl.getMidpoint(metaKeyComparator, kv1, kv2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 334127c..8a2d721 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -46,7 +46,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; 
import org.apache.hadoop.hbase.HConstants; @@ -175,7 +175,7 @@ public class TestHFileBlock { // sort it and write to stream int totalSize = 0; - Collections.sort(keyValues, CellComparator.COMPARATOR); + Collections.sort(keyValues, CellComparatorImpl.COMPARATOR); for (KeyValue kv : keyValues) { totalSize += kv.getLength(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index 82c0eca..7995fff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -213,7 +213,7 @@ public class TestHFileBlockIndex { BlockReaderWrapper brw = new BlockReaderWrapper(blockReader); HFileBlockIndex.BlockIndexReader indexReader = new HFileBlockIndex.CellBasedKeyBlockIndexReader( - CellComparator.COMPARATOR, numLevels, brw); + CellComparatorImpl.COMPARATOR, numLevels, brw); indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset, fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries); @@ -230,7 +230,7 @@ public class TestHFileBlockIndex { HFileBlock b = indexReader.seekToDataBlock(keyOnlyKey, null, true, true, false, null); - if (CellComparator.COMPARATOR.compare(keyOnlyKey, firstKeyInFile, + if (CellComparatorImpl.COMPARATOR.compare(keyOnlyKey, firstKeyInFile, 0, firstKeyInFile.length) < 0) { assertTrue(b == null); ++i; @@ -375,7 +375,7 @@ public class TestHFileBlockIndex { // Make sure the keys are increasing. 
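Illustrative sketch, not part of the patch: the "make sure the keys are increasing" checks above reduce to pairwise compare() calls against the same singleton. A hedged, minimal helper along those lines is shown below; CellComparatorImpl and Cell come from the diff, while the class and method names are invented for illustration.

import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;

public class AscendingOrderCheckSketch {
  /** Returns true when the cells are in strictly increasing CellComparatorImpl order. */
  static boolean isStrictlyIncreasing(List<Cell> cells) {
    for (int i = 0; i + 1 < cells.size(); i++) {
      // compare(a, b) < 0 means a sorts before b under this comparator.
      if (CellComparatorImpl.COMPARATOR.compare(cells.get(i), cells.get(i + 1)) >= 0) {
        return false;
      }
    }
    return true;
  }
}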
for (int i = 0; i < keys.size() - 1; ++i) - assertTrue(CellComparator.COMPARATOR.compare( + assertTrue(CellComparatorImpl.COMPARATOR.compare( new KeyValue.KeyOnlyKeyValue(keys.get(i), 0, keys.get(i).length), new KeyValue.KeyOnlyKeyValue(keys.get(i + 1), 0, keys.get(i + 1).length)) < 0); @@ -414,7 +414,7 @@ public class TestHFileBlockIndex { KeyValue.KeyOnlyKeyValue cell = new KeyValue.KeyOnlyKeyValue( arrayHoldingKey, searchKey.length / 2, searchKey.length); int searchResult = BlockIndexReader.binarySearchNonRootIndex(cell, - new MultiByteBuff(nonRootIndex), CellComparator.COMPARATOR); + new MultiByteBuff(nonRootIndex), CellComparatorImpl.COMPARATOR); String lookupFailureMsg = "Failed to look up key #" + i + " (" + Bytes.toStringBinary(searchKey) + ")"; @@ -440,7 +440,7 @@ public class TestHFileBlockIndex { // higher-level API function.s boolean locateBlockResult = (BlockIndexReader.locateNonRootIndexEntry(new MultiByteBuff(nonRootIndex), cell, - CellComparator.COMPARATOR) != -1); + CellComparatorImpl.COMPARATOR) != -1); if (i == 0) { assertFalse(locateBlockResult); @@ -636,7 +636,7 @@ public class TestHFileBlockIndex { values[i] = CellUtil.cloneValue(kv); keyStrSet.add(Bytes.toStringBinary(k)); if (i > 0) { - assertTrue((CellComparator.COMPARATOR.compare(kv, keys[i - 1], + assertTrue((CellComparatorImpl.COMPARATOR.compare(kv, keys[i - 1], 0, keys[i - 1].length)) > 0); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java index 5f5cb74..e4a3908 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; @@ -137,7 +137,7 @@ public class TestHFileSeek extends TestCase { Writer writer = HFile.getWriterFactoryNoCache(conf) .withOutputStream(fout) .withFileContext(context) - .withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .create(); try { BytesWritable key = new BytesWritable(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index 400c72f..015df54 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -38,7 +38,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -125,7 +125,7 @@ public class TestHFileWriterV3 { HFile.Writer writer = new HFile.WriterFactory(conf, new CacheConfig(conf)) .withPath(fs, hfilePath) .withFileContext(context) - 
.withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .create(); Random rand = new Random(9713312); // Just a fixed seed. @@ -179,7 +179,7 @@ public class TestHFileWriterV3 { HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(fsdis, fileSize, meta); // Comparator class name is stored in the trailer in version 3. - CellComparator comparator = trailer.createComparator(); + CellComparatorImpl comparator = trailer.createComparator(); HFileBlockIndex.BlockIndexReader dataBlockIndexReader = new HFileBlockIndex.CellBasedKeyBlockIndexReader(comparator, trailer.getNumDataIndexLevels()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java index dfa5ee8..70d283e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java @@ -25,7 +25,7 @@ import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -111,7 +111,7 @@ public class TestPrefetch { .build(); StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs) .withOutputDir(storeFileParentDir) - .withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .withFileContext(meta) .build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java index b3cd8ee..8a20af7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java @@ -26,7 +26,7 @@ import java.util.List; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -66,7 +66,7 @@ public class TestReseekTo { .withOutputStream(fout) .withFileContext(context) // NOTE: This test is dependent on this deprecated nonstandard comparator - .withComparator(CellComparator.COMPARATOR) + .withComparator(CellComparatorImpl.COMPARATOR) .create(); int numberOfKeys = 1000; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index f965323..5f8a6c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -36,7 +36,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -122,7 +122,7 @@ public class TestSeekTo 
{ Configuration conf = TEST_UTIL.getConfiguration(); HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout) .withFileContext(context) - .withComparator(CellComparator.COMPARATOR).create(); + .withComparator(CellComparatorImpl.COMPARATOR).create(); // 4 bytes * 3 * 2 for each key/value + // 3 for keys, 15 for values = 42 (woot) writer.append(toKV("c", tagUsage)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java index 6681a96..bf899ea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java @@ -47,7 +47,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -882,7 +882,7 @@ public class TestPartitionedMobCompactor { false, true, false, false, HConstants.LATEST_TIMESTAMP)); long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0); long ttl = HStore.determineTTLFromFamily(hcd); - ScanInfo scanInfo = new ScanInfo(conf, hcd, ttl, timeToPurgeDeletes, CellComparator.COMPARATOR); + ScanInfo scanInfo = new ScanInfo(conf, hcd, ttl, timeToPurgeDeletes, CellComparatorImpl.COMPARATOR); StoreScanner scanner = new StoreScanner(scanInfo, ScanType.COMPACT_RETAIN_DELETES, scanners); List results = new ArrayList<>(); boolean hasMore = true; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index a68e9f1..5de440d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java @@ -25,7 +25,7 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ByteBufferKeyValue; @@ -334,6 +334,6 @@ public class TestProtobufUtil { ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); Cell newOffheapKV = ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); - assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); + assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java index 4e0f348..2a54cb1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java @@ -23,7 +23,7 @@ 
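Illustrative sketch, not part of the patch: the scanner and memstore fixtures below keep their cells in collections ordered by the same comparator, so seekers and scanners observe comparator order. Assuming only classes already named in this diff (plus java.util.concurrent), that setup looks roughly like the following; the class name SortedCellSetSketch and the sample values are invented for illustration.

import java.util.concurrent.ConcurrentSkipListSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class SortedCellSetSketch {
  public static void main(String[] args) {
    // A skip-list set keyed by the CellComparatorImpl singleton keeps cells
    // in the order the scan fixtures expect to hand them out.
    ConcurrentSkipListSet<Cell> cells =
        new ConcurrentSkipListSet<>(CellComparatorImpl.COMPARATOR);
    byte[] fam = Bytes.toBytes("f");
    byte[] val = Bytes.toBytes("v");
    cells.add(new KeyValue(Bytes.toBytes("row2"), fam, Bytes.toBytes("q"), val));
    cells.add(new KeyValue(Bytes.toBytes("row1"), fam, Bytes.toBytes("q"), val));
    // Iteration now yields the row1 cell before the row2 cell.
  }
}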
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.util.CollectionBackedScanner; @@ -34,14 +34,14 @@ import org.apache.hadoop.hbase.util.CollectionBackedScanner; * to be a store file scanner. */ public class KeyValueScanFixture extends CollectionBackedScanner { - public KeyValueScanFixture(CellComparator comparator, Cell... cells) { + public KeyValueScanFixture(CellComparatorImpl comparator, Cell... cells) { super(comparator, cells); } public static List scanFixture(KeyValue[] ... kvArrays) { ArrayList scanners = new ArrayList<>(); for (KeyValue [] kvs : kvArrays) { - scanners.add(new KeyValueScanFixture(CellComparator.COMPARATOR, kvs)); + scanners.add(new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs)); } return scanners; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java index 6307d32..cf425b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java @@ -29,7 +29,7 @@ import java.util.SortedSet; import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -112,9 +112,9 @@ public class TestCellFlatSet extends TestCase { lowerOuterCell = new KeyValue(Bytes.toBytes(10), f, q, 10, v); upperOuterCell = new KeyValue(Bytes.toBytes(50), f, q, 10, v); ascCells = new Cell[] {kv1,kv2,kv3,kv4}; - ascCbOnHeap = new CellArrayMap(CellComparator.COMPARATOR,ascCells,0,NUM_OF_CELLS,false); + ascCbOnHeap = new CellArrayMap(CellComparatorImpl.COMPARATOR,ascCells,0,NUM_OF_CELLS,false); descCells = new Cell[] {kv4,kv3,kv2,kv1}; - descCbOnHeap = new CellArrayMap(CellComparator.COMPARATOR,descCells,0,NUM_OF_CELLS,true); + descCbOnHeap = new CellArrayMap(CellComparatorImpl.COMPARATOR,descCells,0,NUM_OF_CELLS,true); CONF.setBoolean(MemStoreLAB.USEMSLAB_KEY, true); CONF.setFloat(MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY, 0.2f); @@ -315,6 +315,6 @@ public class TestCellFlatSet extends TestCase { idxOffset = ByteBufferUtils.putLong(idxBuffer, idxOffset, kv.getSequenceId()); // seqId } - return new CellChunkMap(CellComparator.COMPARATOR,chunkArray,0,NUM_OF_CELLS,!asc); + return new CellChunkMap(CellComparatorImpl.COMPARATOR,chunkArray,0,NUM_OF_CELLS,!asc); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java index e0cc39f..4fa06b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import junit.framework.TestCase; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -34,7 +34,7 @@ import java.util.SortedSet; @Category({RegionServerTests.class, SmallTests.class}) public class TestCellSkipListSet extends TestCase { private final CellSet csls = - new CellSet(CellComparator.COMPARATOR); + new CellSet(CellComparatorImpl.COMPARATOR); protected void setUp() throws Exception { super.setUp(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java index 0886fd1..e91520e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java @@ -74,7 +74,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore { @Before public void setUp() throws Exception { compactingSetUp(); - this.memstore = new CompactingMemStore(HBaseConfiguration.create(), CellComparator.COMPARATOR, + this.memstore = new CompactingMemStore(HBaseConfiguration.create(), CellComparatorImpl.COMPARATOR, store, regionServicesForStores, MemoryCompactionPolicy.EAGER); } @@ -134,7 +134,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore { // use case 3: first in snapshot second in kvset this.memstore = new CompactingMemStore(HBaseConfiguration.create(), - CellComparator.COMPARATOR, store, regionServicesForStores, + CellComparatorImpl.COMPARATOR, store, regionServicesForStores, MemoryCompactionPolicy.EAGER); this.memstore.add(kv1.clone(), null); // As compaction is starting in the background the repetition @@ -177,7 +177,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore { Thread.sleep(1); addRows(this.memstore); Cell closestToEmpty = ((CompactingMemStore)this.memstore).getNextRow(KeyValue.LOWESTKEY); - assertTrue(CellComparator.COMPARATOR.compareRows(closestToEmpty, + assertTrue(CellComparatorImpl.COMPARATOR.compareRows(closestToEmpty, new KeyValue(Bytes.toBytes(0), System.currentTimeMillis())) == 0); for (int i = 0; i < ROW_COUNT; i++) { Cell nr = ((CompactingMemStore)this.memstore).getNextRow(new KeyValue(Bytes.toBytes(i), @@ -185,7 +185,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore { if (i + 1 == ROW_COUNT) { assertEquals(nr, null); } else { - assertTrue(CellComparator.COMPARATOR.compareRows(nr, + assertTrue(CellComparatorImpl.COMPARATOR.compareRows(nr, new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0); } } @@ -203,7 +203,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore { Cell left = results.get(0); byte[] row1 = Bytes.toBytes(rowId); assertTrue("Row name", - CellComparator.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0); + CellComparatorImpl.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0); assertEquals("Count of columns", QUALIFIER_COUNT, results.size()); List row = new ArrayList<>(); for (Cell kv : results) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java index 3fa5cd0..cf7b9c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java @@ -76,7 +76,7 @@ 
public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore String.valueOf(MemoryCompactionPolicy.EAGER)); this.memstore = - new CompactingMemStore(conf, CellComparator.COMPARATOR, store, + new CompactingMemStore(conf, CellComparatorImpl.COMPARATOR, store, regionServicesForStores, MemoryCompactionPolicy.EAGER); } @@ -386,7 +386,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore // Just doing the cnt operation here MemStoreSegmentsIterator itr = new MemStoreMergerSegmentsIterator( ((CompactingMemStore) memstore).getImmutableSegments().getStoreSegments(), - CellComparator.COMPARATOR, 10); + CellComparatorImpl.COMPARATOR, 10); int cnt = 0; try { while (itr.next() != null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index d8d38fa..58c76ca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -37,7 +37,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; @@ -145,7 +145,7 @@ public class TestCompoundBloomFilter { List kvList = new ArrayList<>(n); for (int i = 0; i < n; ++i) kvList.add(RandomKeyValueUtil.randomKeyValue(rand)); - Collections.sort(kvList, CellComparator.COMPARATOR); + Collections.sort(kvList, CellComparatorImpl.COMPARATOR); return kvList; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java index eb9efab..0a974ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java @@ -36,7 +36,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -542,7 +542,7 @@ public class TestDefaultMemStore { @Test public void testMultipleVersionsSimple() throws Exception { - DefaultMemStore m = new DefaultMemStore(new Configuration(), CellComparator.COMPARATOR); + DefaultMemStore m = new DefaultMemStore(new Configuration(), CellComparatorImpl.COMPARATOR); byte [] row = Bytes.toBytes("testRow"); byte [] family = Bytes.toBytes("testFamily"); byte [] qf = Bytes.toBytes("testQualifier"); @@ -575,7 +575,7 @@ public class TestDefaultMemStore { Thread.sleep(1); addRows(this.memstore); Cell closestToEmpty = ((DefaultMemStore) this.memstore).getNextRow(KeyValue.LOWESTKEY); - assertTrue(CellComparator.COMPARATOR.compareRows(closestToEmpty, + assertTrue(CellComparatorImpl.COMPARATOR.compareRows(closestToEmpty, new KeyValue(Bytes.toBytes(0), System.currentTimeMillis())) == 
0); for (int i = 0; i < ROW_COUNT; i++) { Cell nr = ((DefaultMemStore) this.memstore).getNextRow(new KeyValue(Bytes.toBytes(i), @@ -583,7 +583,7 @@ public class TestDefaultMemStore { if (i + 1 == ROW_COUNT) { assertEquals(nr, null); } else { - assertTrue(CellComparator.COMPARATOR.compareRows(nr, + assertTrue(CellComparatorImpl.COMPARATOR.compareRows(nr, new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0); } } @@ -602,7 +602,7 @@ public class TestDefaultMemStore { Cell left = results.get(0); byte[] row1 = Bytes.toBytes(rowId); assertTrue("Row name", - CellComparator.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0); + CellComparatorImpl.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0); assertEquals("Count of columns", QUALIFIER_COUNT, results.size()); List row = new ArrayList<>(); for (Cell kv : results) { @@ -825,7 +825,7 @@ public class TestDefaultMemStore { @Test public void testUpsertMemstoreSize() throws Exception { Configuration conf = HBaseConfiguration.create(); - memstore = new DefaultMemStore(conf, CellComparator.COMPARATOR); + memstore = new DefaultMemStore(conf, CellComparatorImpl.COMPARATOR); MemStoreSize oldSize = memstore.size(); List l = new ArrayList<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java index df5e97a..a1a6022 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultStoreEngine.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor; import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy; @@ -60,7 +60,7 @@ public class TestDefaultStoreEngine { conf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY, DummyStoreFlusher.class.getName()); HStore mockStore = Mockito.mock(HStore.class); - StoreEngine se = StoreEngine.create(mockStore, conf, CellComparator.COMPARATOR); + StoreEngine se = StoreEngine.create(mockStore, conf, CellComparatorImpl.COMPARATOR); Assert.assertTrue(se instanceof DefaultStoreEngine); Assert.assertTrue(se.getCompactionPolicy() instanceof DummyCompactionPolicy); Assert.assertTrue(se.getStoreFlusher() instanceof DummyStoreFlusher); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java index 066e686..38d038f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -228,7 +228,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - 
Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); //Compare @@ -273,7 +273,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); //Compare @@ -318,7 +318,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); //Compare @@ -363,7 +363,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); //Compare @@ -415,7 +415,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); //Compare @@ -531,7 +531,7 @@ public class TestHMobStore { List results = new ArrayList<>(); scanner.next(results); - Collections.sort(results, CellComparator.COMPARATOR); + Collections.sort(results, CellComparatorImpl.COMPARATOR); scanner.close(); Assert.assertEquals(expected.size(), results.size()); for(int i=0; i createStoreEngine(HStore store, Configuration conf, - CellComparator kvComparator) throws IOException { + CellComparatorImpl kvComparator) throws IOException { List storefiles = Arrays.asList(mockStoreFile(currentTime - 10), mockStoreFile(currentTime - 100), mockStoreFile(currentTime - 1000), mockStoreFile(currentTime - 10000)); @@ -1655,7 +1655,7 @@ public class TestHStore { public static class MyCompactingMemStoreWithCustomCompactor extends CompactingMemStore { private static final AtomicInteger RUNNER_COUNT = new AtomicInteger(0); - public MyCompactingMemStoreWithCustomCompactor(Configuration conf, CellComparator c, + public MyCompactingMemStoreWithCustomCompactor(Configuration conf, CellComparatorImpl c, HStore store, RegionServicesForStores regionServices, MemoryCompactionPolicy compactionPolicy) throws IOException { super(conf, c, store, regionServices, compactionPolicy); @@ -1680,7 +1680,7 @@ public class TestHStore { private static final AtomicBoolean START_TEST = new AtomicBoolean(false); private final CountDownLatch getScannerLatch = new CountDownLatch(1); private final CountDownLatch snapshotLatch = new CountDownLatch(1); - public MyCompactingMemStore(Configuration conf, CellComparator c, + public MyCompactingMemStore(Configuration conf, CellComparatorImpl c, HStore store, RegionServicesForStores regionServices, MemoryCompactionPolicy compactionPolicy) throws IOException { super(conf, c, store, regionServices, compactionPolicy); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java index d574e75..24ab7a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java @@ -25,7 +25,7 @@ import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import 
org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -77,7 +77,7 @@ public class TestKeyValueHeap extends HBaseTestCase { public List assertCells(List expected, List scanners) throws IOException { //Creating KeyValueHeap - KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparator.COMPARATOR); + KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); List actual = new ArrayList<>(); while(kvh.peek() != null){ @@ -106,7 +106,7 @@ public class TestKeyValueHeap extends HBaseTestCase { //Check if result is sorted according to Comparator for(int i=0; i scanners = new ArrayList<>(Arrays.asList(s1, s2, s3, s4)); // Creating KeyValueHeap - KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparator.COMPARATOR); + KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); try { for (KeyValueScanner scanner : scanners) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java index 0e96682..296cf0c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java @@ -24,7 +24,7 @@ import java.io.IOException; import junit.framework.TestCase; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; import org.apache.hadoop.hbase.KeyValueUtil; @@ -46,7 +46,7 @@ public class TestKeyValueScanFixture extends TestCase { KeyValueTestUtil.create("RowB", "family", "qf1", 10, KeyValue.Type.Put, "value-10") }; - KeyValueScanner scan = new KeyValueScanFixture(CellComparator.COMPARATOR, kvs); + KeyValueScanner scan = new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs); KeyValue kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowA")); // should seek to this: diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java index 8a3a6dd..3706f4e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java @@ -40,7 +40,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -447,7 +447,7 @@ public class TestMobStoreCompaction { long timeToPurgeDeletes = Math.max(conf.getLong("hbase.hstore.time.to.purge.deletes", 0), 0); long ttl = HStore.determineTTLFromFamily(hcd); ScanInfo scanInfo = new ScanInfo(copyOfConf, hcd, ttl, timeToPurgeDeletes, - CellComparator.COMPARATOR); + CellComparatorImpl.COMPARATOR); StoreScanner scanner = new StoreScanner(scanInfo, ScanType.COMPACT_DROP_DELETES, scanners); try { size += UTIL.countRows(scanner); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java index d22046c..6bffccf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java @@ -38,7 +38,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -223,7 +223,7 @@ public class TestMultiColumnScanner { region.flush(true); } - Collections.sort(kvs, CellComparator.COMPARATOR); + Collections.sort(kvs, CellComparatorImpl.COMPARATOR); for (int maxVersions = 1; maxVersions <= TIMESTAMPS.length; ++maxVersions) { for (int columnBitMask = 1; columnBitMask <= MAX_COLUMN_BIT_MASK; ++columnBitMask) { Scan scan = new Scan(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java index 6494d5f..7435ffd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java @@ -29,7 +29,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -179,7 +179,7 @@ public class TestRecoveredEdits { Cell previous = null; for (Cell cell: val.getCells()) { if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue; - if (previous != null && CellComparator.COMPARATOR.compareRows(previous, cell) == 0) + if (previous != null && CellComparatorImpl.COMPARATOR.compareRows(previous, cell) == 0) continue; previous = cell; Get g = new Get(CellUtil.cloneRow(cell)); @@ -187,7 +187,7 @@ public class TestRecoveredEdits { boolean found = false; for (CellScanner scanner = r.cellScanner(); scanner.advance();) { Cell current = scanner.current(); - if (CellComparator.COMPARATOR.compareKeyIgnoresMvcc(cell, current) == 0) { + if (CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(cell, current) == 0) { found = true; break; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index dbf3be0..07c37a6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -34,7 +34,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import 
org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -265,7 +265,7 @@ public class TestReversibleScanners { ScanInfo scanInfo = new ScanInfo(TEST_UTIL.getConfiguration(), FAMILYNAME, 0, Integer.MAX_VALUE, Long.MAX_VALUE, - KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.COMPARATOR, false); + KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false); // Case 1.Test a full reversed scan Scan scan = new Scan(); @@ -486,7 +486,7 @@ public class TestReversibleScanners { private ReversedKeyValueHeap getReversibleKeyValueHeap(MemStore memstore, HStoreFile sf1, HStoreFile sf2, byte[] startRow, int readPoint) throws IOException { List scanners = getScanners(memstore, sf1, sf2, startRow, true, readPoint); - ReversedKeyValueHeap kvHeap = new ReversedKeyValueHeap(scanners, CellComparator.COMPARATOR); + ReversedKeyValueHeap kvHeap = new ReversedKeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); return kvHeap; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java index 7907e13..f741faa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java @@ -32,7 +32,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -501,9 +501,11 @@ public class TestScannerHeartbeatMessages { @Override protected void initializeKVHeap(List scanners, List joinedScanners, HRegion region) throws IOException { - this.storeHeap = new HeartbeatReversedKVHeap(scanners, region.getCellComparator()); + this.storeHeap = + new HeartbeatReversedKVHeap(scanners, (CellComparatorImpl) region.getCellComparator()); if (!joinedScanners.isEmpty()) { - this.joinedHeap = new HeartbeatReversedKVHeap(joinedScanners, region.getCellComparator()); + this.joinedHeap = new HeartbeatReversedKVHeap(joinedScanners, + (CellComparatorImpl) region.getCellComparator()); } } } @@ -528,9 +530,11 @@ public class TestScannerHeartbeatMessages { @Override protected void initializeKVHeap(List scanners, List joinedScanners, HRegion region) throws IOException { - this.storeHeap = new HeartbeatKVHeap(scanners, region.getCellComparator()); + this.storeHeap = + new HeartbeatKVHeap(scanners, (CellComparatorImpl) region.getCellComparator()); if (!joinedScanners.isEmpty()) { - this.joinedHeap = new HeartbeatKVHeap(joinedScanners, region.getCellComparator()); + this.joinedHeap = + new HeartbeatKVHeap(joinedScanners, (CellComparatorImpl) region.getCellComparator()); } } } @@ -540,7 +544,7 @@ public class TestScannerHeartbeatMessages { * cells. 
Useful for testing */ private static final class HeartbeatKVHeap extends KeyValueHeap { - public HeartbeatKVHeap(List scanners, CellComparator comparator) + public HeartbeatKVHeap(List scanners, CellComparatorImpl comparator) throws IOException { super(scanners, comparator); } @@ -565,7 +569,7 @@ public class TestScannerHeartbeatMessages { */ private static final class HeartbeatReversedKVHeap extends ReversedKeyValueHeap { public HeartbeatReversedKVHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { super(scanners, comparator); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java index 67f6f34..901d31f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java @@ -35,7 +35,7 @@ import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -304,7 +304,7 @@ public class TestSeekOptimizations { } } expectedKVs = filteredKVs; - Collections.sort(expectedKVs, CellComparator.COMPARATOR); + Collections.sort(expectedKVs, CellComparatorImpl.COMPARATOR); } public void put(String qual, long ts) { @@ -459,7 +459,7 @@ public class TestSeekOptimizations { int i; for (i = 0; i < minLen - && CellComparator.COMPARATOR.compareKeyIgnoresMvcc(expected.get(i), actual.get(i)) == 0; + && CellComparatorImpl.COMPARATOR.compareKeyIgnoresMvcc(expected.get(i), actual.get(i)) == 0; ++i) {} if (additionalMsg == null) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java index 889eb48..fa9c705 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java @@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -73,7 +73,7 @@ public class TestStoreScanner { private static final byte[] CF = Bytes.toBytes(CF_STR); static Configuration CONF = HBaseConfiguration.create(); private ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, Integer.MAX_VALUE, Long.MAX_VALUE, - KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.COMPARATOR, false); + KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false); /** * From here on down, we have a bunch of defines and specific CELL_GRID of Cells. 
The @@ -136,7 +136,7 @@ public class TestStoreScanner { final AtomicInteger count; public KeyValueHeapWithCount(List scanners, - CellComparator comparator, AtomicInteger count) throws IOException { + CellComparatorImpl comparator, AtomicInteger count) throws IOException { super(scanners, comparator); this.count = count; } @@ -159,11 +159,11 @@ public class TestStoreScanner { CellGridStoreScanner(final Scan scan, ScanInfo scanInfo) throws IOException { super(scan, scanInfo, scan.getFamilyMap().get(CF), Arrays. asList( - new KeyValueScanner[] { new KeyValueScanFixture(CellComparator.COMPARATOR, CELL_GRID) })); + new KeyValueScanner[] { new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_GRID) })); } protected void resetKVHeap(List scanners, - CellComparator comparator) throws IOException { + CellComparatorImpl comparator) throws IOException { if (count == null) { count = new AtomicInteger(0); } @@ -222,7 +222,7 @@ public class TestStoreScanner { CellWithVersionsStoreScanner(final Scan scan, ScanInfo scanInfo) throws IOException { super(scan, scanInfo, scan.getFamilyMap().get(CF), Arrays. asList(new KeyValueScanner[] { - new KeyValueScanFixture(CellComparator.COMPARATOR, CELL_WITH_VERSIONS) })); + new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_WITH_VERSIONS) })); } protected boolean trySkipToNextColumn(Cell cell) throws IOException { @@ -249,7 +249,7 @@ public class TestStoreScanner { CellWithVersionsNoOptimizeStoreScanner(Scan scan, ScanInfo scanInfo) throws IOException { super(scan, scanInfo, scan.getFamilyMap().get(CF), Arrays. asList(new KeyValueScanner[] { - new KeyValueScanFixture(CellComparator.COMPARATOR, CELL_WITH_VERSIONS) })); + new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, CELL_WITH_VERSIONS) })); } protected boolean trySkipToNextColumn(Cell cell) throws IOException { @@ -453,7 +453,7 @@ public class TestStoreScanner { }; List scanners = Arrays.asList( new KeyValueScanner[] { - new KeyValueScanFixture(CellComparator.COMPARATOR, kvs) + new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs) }); Scan scanSpec = new Scan().withStartRow(Bytes.toBytes(r1)); scanSpec.setTimeRange(0, 6); @@ -504,7 +504,7 @@ public class TestStoreScanner { }; List scanners = Arrays.asList( new KeyValueScanner[] { - new KeyValueScanFixture(CellComparator.COMPARATOR, kvs) + new KeyValueScanFixture(CellComparatorImpl.COMPARATOR, kvs) }); Scan scanSpec = new Scan().withStartRow(Bytes.toBytes("R1")); @@ -797,7 +797,7 @@ public class TestStoreScanner { Scan scan = new Scan(); scan.readVersions(1); ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE, - HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.COMPARATOR, false); + HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false); try (StoreScanner scanner = new StoreScanner(scan, scanInfo, null, scanners)) { List results = new ArrayList<>(); assertEquals(true, scanner.next(results)); @@ -863,7 +863,7 @@ public class TestStoreScanner { scan.readVersions(1); // scanner with ttl equal to 500 ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE, - HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.COMPARATOR, false); + HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false); try (StoreScanner scanner = new StoreScanner(scan, scanInfo, null, scanners)) { List results = new ArrayList<>(); assertEquals(true, scanner.next(results)); @@ -925,7 +925,7 @@ public class TestStoreScanner { KeepDeletedCells.FALSE /* keepDeletedCells */, HConstants.DEFAULT_BLOCKSIZE /* 
block size */, 200, /* timeToPurgeDeletes */ - CellComparator.COMPARATOR, false); + CellComparatorImpl.COMPARATOR, false); try (StoreScanner scanner = new StoreScanner(scanInfo, OptionalInt.of(2), ScanType.COMPACT_DROP_DELETES, scanners)) { List results = new ArrayList<>(); @@ -954,7 +954,7 @@ public class TestStoreScanner { create("R1", "cf", "a", now - 10, KeyValue.Type.Put, "dont-care"), }; List scanners = scanFixture(kvs); ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, 1, 500, KeepDeletedCells.FALSE, - HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.COMPARATOR, false); + HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false); try (StoreScanner storeScanner = new StoreScanner(scanInfo, OptionalInt.empty(), ScanType.COMPACT_RETAIN_DELETES, scanners)) { assertFalse(storeScanner.isScanUsePread()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java index 41f124d..e2925ed 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java @@ -34,7 +34,7 @@ import java.util.OptionalLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl; @@ -115,7 +115,7 @@ public class TestStripeStoreEngine { private static TestStoreEngine createEngine(Configuration conf) throws Exception { HStore store = mock(HStore.class); - CellComparator kvComparator = mock(CellComparator.class); + CellComparatorImpl kvComparator = mock(CellComparatorImpl.class); return (TestStoreEngine)StoreEngine.create(store, conf, kvComparator); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java index 6e5aeed..b76cd1a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java @@ -36,7 +36,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -608,7 +608,7 @@ public class TestStripeStoreFileManager { ArrayList sfs, Configuration conf) throws Exception { StripeStoreConfig config = new StripeStoreConfig( conf, Mockito.mock(StoreConfigInformation.class)); - StripeStoreFileManager result = new StripeStoreFileManager(CellComparator.COMPARATOR, conf, + StripeStoreFileManager result = new StripeStoreFileManager(CellComparatorImpl.COMPARATOR, conf, config); result.loadFiles(sfs); return result; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java index c1a9c29..a5a0e78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestDateTieredCompactor.java @@ -36,7 +36,7 @@ import java.util.OptionalLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -94,7 +94,7 @@ public class TestDateTieredCompactor { final Scanner scanner = new Scanner(input); // Create store mock that is satisfactory for compactor. HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS); - ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR); + ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR); HStore store = mock(HStore.class); when(store.getStorefiles()).thenReturn(storefiles); when(store.getColumnFamilyDescriptor()).thenReturn(col); @@ -104,7 +104,7 @@ public class TestDateTieredCompactor { when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME)); when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers); - when(store.getComparator()).thenReturn(CellComparator.COMPARATOR); + when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR); OptionalLong maxSequenceId = StoreUtils.getMaxSequenceIdInList(storefiles); when(store.getMaxSequenceId()).thenReturn(maxSequenceId); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java index f3cb293..65a813d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java @@ -47,7 +47,7 @@ import java.util.OptionalLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -594,7 +594,7 @@ public class TestStripeCompactionPolicy { protected void verifyFlush(StripeCompactionPolicy policy, StripeInformationProvider si, KeyValue[] input, KeyValue[][] expected, byte[][] boundaries) throws IOException { StoreFileWritersCapture writers = new StoreFileWritersCapture(); - StripeStoreFlusher.StripeFlushRequest req = policy.selectFlush(CellComparator.COMPARATOR, si, + StripeStoreFlusher.StripeFlushRequest req = policy.selectFlush(CellComparatorImpl.COMPARATOR, si, input.length); StripeMultiFileWriter mw = req.createWriter(); mw.init(null, writers); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java index bd3a803..dbf95f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactor.java @@ -34,7 +34,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -194,7 +194,7 @@ public class TestStripeCompactor { // Create store mock that is satisfactory for compactor. HColumnDescriptor col = new HColumnDescriptor(NAME_OF_THINGS); - ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparator.COMPARATOR); + ScanInfo si = new ScanInfo(conf, col, Long.MAX_VALUE, 0, CellComparatorImpl.COMPARATOR); HStore store = mock(HStore.class); when(store.getColumnFamilyDescriptor()).thenReturn(col); when(store.getScanInfo()).thenReturn(si); @@ -203,7 +203,7 @@ public class TestStripeCompactor { when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME)); when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class), anyBoolean(), anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers); - when(store.getComparator()).thenReturn(CellComparator.COMPARATOR); + when(store.getComparator()).thenReturn(CellComparatorImpl.COMPARATOR); return new StripeCompactor(conf, store) { @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java index df33d824..049ee74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/AbstractTestScanQueryMatcher.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.regionserver.querymatcher; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Scan; @@ -45,7 +45,7 @@ public class AbstractTestScanQueryMatcher { protected Get get; protected long ttl = Long.MAX_VALUE; - protected CellComparator rowComparator; + protected CellComparatorImpl rowComparator; protected Scan scan; @Before @@ -72,6 +72,6 @@ public class AbstractTestScanQueryMatcher { get.addColumn(fam2, col5); this.scan = new Scan(get); - rowComparator = CellComparator.COMPARATOR; + rowComparator = CellComparatorImpl.COMPARATOR; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java index 0d0004d..be65576 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java @@ -32,7 +32,7 @@ import java.util.Set; import java.util.TreeMap; import 
org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; @@ -328,7 +328,7 @@ public class TestReplicationWALEntryFilters { List cells2 = e2.getEdit().getCells(); Assert.assertEquals(cells1.size(), cells2.size()); for (int i = 0; i < cells1.size(); i++) { - CellComparator.COMPARATOR.compare(cells1.get(i), cells2.get(i)); + CellComparatorImpl.COMPARATOR.compare(cells1.get(i), cells2.get(i)); } } } diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala index d7e3f4f..0c51b28 100644 --- a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala +++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/HBaseContext.scala @@ -917,7 +917,7 @@ class HBaseContext(@transient sc: SparkContext, new WriterLength(0, new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs)) .withBloomType(BloomType.valueOf(familyOptions.bloomType)) - .withComparator(CellComparator.COMPARATOR).withFileContext(hFileContext) + .withComparator(CellComparatorImpl.COMPARATOR).withFileContext(hFileContext) .withFilePath(new Path(familydir, "_" + UUID.randomUUID.toString.replaceAll("-", ""))) .withFavoredNodes(favoredNodes).build())