diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 5518f3a..fa696ca 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -35,7 +35,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.master.RegionState;
@@ -967,7 +966,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
   /**
    * @return Comparator to use comparing {@link KeyValue}s.
    */
-  public KVComparator getComparator() {
+  public CellComparator getComparator() {
     return isMetaRegion()? KeyValue.META_COMPARATOR: KeyValue.COMPARATOR;
   }
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 019ede7..febd869 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -26,7 +26,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -213,7 +213,7 @@ final public class FilterList extends Filter {
 
   @Override
   public Cell transformCell(Cell c) throws IOException {
-    if (!CellComparator.equals(c, referenceCell)) {
+    if (!CellUtil.equals(c, referenceCell)) {
       throw new IllegalStateException("Reference Cell: " + this.referenceCell + " does not match: "
           + c);
     }
diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
index e9c83cc..a92dc57 100644
--- hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
+++ hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
@@ -19,12 +19,12 @@
 package org.apache.hadoop.hbase;
 
 import java.io.Serializable;
-import java.util.Comparator;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.RawComparator;
 
 import com.google.common.primitives.Longs;
 
@@ -40,15 +40,21 @@ import com.google.common.primitives.Longs;
     justification="Findbugs doesn't like the way we are negating the result of a compare in below")
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public class CellComparator implements Comparator<Cell>, Serializable{
+public class CellComparator implements RawComparator<Cell>, Serializable {
   private static final long serialVersionUID = -8760041766259623329L;
 
+  public static CellComparator CELL_COMPARATOR = new CellComparator();
+
   @Override
   public int compare(Cell a, Cell b) {
-    return compareStatic(a, b, false);
+    return compare(a, b, false);
+  }
+
+  public int
compareOnlyKeyPortion(Cell left, Cell right) { + return compare(left, right, true); } - public static int compareStatic(Cell a, Cell b, boolean onlyKey) { + private int compare(Cell a, Cell b, boolean onlyKey) { // row int c = compareRows(a, b); if (c != 0) return c; @@ -84,7 +90,7 @@ public class CellComparator implements Comparator, Serializable{ * @param c * @return Long.MAX_VALUE if there is no LOG_REPLAY_TAG */ - private static long getReplaySeqNum(final Cell c) { + private long getReplaySeqNum(final Cell c) { Tag tag = Tag.getTag(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength(), TagType.LOG_REPLAY_TAG_TYPE); @@ -94,74 +100,7 @@ public class CellComparator implements Comparator, Serializable{ return Long.MAX_VALUE; } - public static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) { - return findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength() - - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset() - + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix); - } - - private static int findCommonPrefix(byte[] left, byte[] right, int leftLength, int rightLength, - int leftOffset, int rightOffset) { - int length = Math.min(leftLength, rightLength); - int result = 0; - - while (result < length && left[leftOffset + result] == right[rightOffset + result]) { - result++; - } - return result; - } - - public static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) { - return findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength() - - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix, left.getFamilyOffset() - + familyCommonPrefix, right.getFamilyOffset() + familyCommonPrefix); - } - - public static int findCommonPrefixInQualifierPart(Cell left, Cell right, - int qualifierCommonPrefix) { - return findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(), - left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength() - - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix, - right.getQualifierOffset() + qualifierCommonPrefix); - } - - /**************** equals ****************************/ - - public static boolean equals(Cell a, Cell b){ - return equalsRow(a, b) - && equalsFamily(a, b) - && equalsQualifier(a, b) - && equalsTimestamp(a, b) - && equalsType(a, b); - } - - public static boolean equalsRow(Cell a, Cell b){ - return Bytes.equals( - a.getRowArray(), a.getRowOffset(), a.getRowLength(), - b.getRowArray(), b.getRowOffset(), b.getRowLength()); - } - - public static boolean equalsFamily(Cell a, Cell b){ - return Bytes.equals( - a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(), - b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength()); - } - - public static boolean equalsQualifier(Cell a, Cell b){ - return Bytes.equals( - a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(), - b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength()); - } - - public static boolean equalsTimestamp(Cell a, Cell b){ - return a.getTimestamp() == b.getTimestamp(); - } - - public static boolean equalsType(Cell a, Cell b){ - return a.getTypeByte() == b.getTypeByte(); - } - - public static int compareColumns(final Cell left, final Cell right) { + public int compareColumns(final Cell left, final Cell right) { int lfoffset = left.getFamilyOffset(); int rfoffset = right.getFamilyOffset(); int lclength = left.getQualifierLength(); @@ -178,36 +117,38 @@ public class 
CellComparator implements Comparator, Serializable{ } } - public static int compareFamilies(Cell left, Cell right) { + public int compareFamilies(Cell left, Cell right) { return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(), right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength()); } - public static int compareQualifiers(Cell left, Cell right) { + public int compareQualifiers(Cell left, Cell right) { return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(), left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(), right.getQualifierLength()); } - public int compareFlatKey(Cell left, Cell right) { - int compare = compareRows(left, right); - if (compare != 0) { - return compare; - } - return compareWithoutRow(left, right); + public int compareFlatKey(byte[] left, int lOffset, int lLength, byte[] right, int rOffset, + int rLength) { + return compareOnlyKeyPortion(new KeyValue.KeyOnlyKeyValue(left, lOffset, lLength), + new KeyValue.KeyOnlyKeyValue(right, rOffset, rLength)); } - public static int compareRows(final Cell left, final Cell right) { + public int compareFlatKey(byte[] left, byte[] right) { + return compareFlatKey(left, 0, left.length, right, 0, right.length); + } + + public int compareRows(final Cell left, final Cell right) { return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right.getRowArray(), right.getRowOffset(), right.getRowLength()); } - public static int compareRows(byte[] left, int loffset, int llength, byte[] right, int roffset, + public int compareRows(byte[] left, int loffset, int llength, byte[] right, int roffset, int rlength) { return Bytes.compareTo(left, loffset, llength, right, roffset, rlength); } - public static int compareWithoutRow(final Cell leftCell, final Cell rightCell) { + public int compareWithoutRow(final Cell leftCell, final Cell rightCell) { if (leftCell.getFamilyLength() + leftCell.getQualifierLength() == 0 && leftCell.getTypeByte() == Type.Minimum.getCode()) { // left is "bigger", i.e. it appears later in the sorted order @@ -220,15 +161,12 @@ public class CellComparator implements Comparator, Serializable{ boolean sameFamilySize = (leftCell.getFamilyLength() == rightCell.getFamilyLength()); if (!sameFamilySize) { // comparing column family is enough. - - return Bytes.compareTo(leftCell.getFamilyArray(), leftCell.getFamilyOffset(), - leftCell.getFamilyLength(), rightCell.getFamilyArray(), rightCell.getFamilyOffset(), - rightCell.getFamilyLength()); + return compareFamilies(leftCell, rightCell); } int diff = compareColumns(leftCell, rightCell); if (diff != 0) return diff; - diff = compareTimestamps(leftCell, rightCell); + diff = CellUtil.compareTimestamps(leftCell, rightCell); if (diff != 0) return diff; // Compare types. Let the delete types sort ahead of puts; i.e. types @@ -238,67 +176,29 @@ public class CellComparator implements Comparator, Serializable{ return (0xff & rightCell.getTypeByte()) - (0xff & leftCell.getTypeByte()); } - public static int compareTimestamps(final Cell left, final Cell right) { - long ltimestamp = left.getTimestamp(); - long rtimestamp = right.getTimestamp(); - return compareTimestamps(ltimestamp, rtimestamp); + public int compareTimestamps(final Cell left, final Cell right) { + return CellUtil.compareTimestamps(left, right); } - - /********************* hashCode ************************/ - - /** - * Returns a hash code that is always the same for two Cells having a matching equals(..) 
result. - * Currently does not guard against nulls, but it could if necessary. - */ - public static int hashCode(Cell cell){ - if (cell == null) {// return 0 for empty Cell - return 0; - } - - //pre-calculate the 3 hashes made of byte ranges - int rowHash = Bytes.hashCode(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); - int familyHash = - Bytes.hashCode(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()); - int qualifierHash = Bytes.hashCode(cell.getQualifierArray(), cell.getQualifierOffset(), - cell.getQualifierLength()); - - //combine the 6 sub-hashes - int hash = 31 * rowHash + familyHash; - hash = 31 * hash + qualifierHash; - hash = 31 * hash + (int)cell.getTimestamp(); - hash = 31 * hash + cell.getTypeByte(); - hash = 31 * hash + (int)cell.getMvccVersion(); - return hash; + + public int compareTimestamps(final long ts1, final long ts2) { + return CellUtil.compareTimestamps(ts1, ts2); } - - /******************** lengths *************************/ - - public static boolean areKeyLengthsEqual(Cell a, Cell b) { - return a.getRowLength() == b.getRowLength() - && a.getFamilyLength() == b.getFamilyLength() - && a.getQualifierLength() == b.getQualifierLength(); - } - - public static boolean areRowLengthsEqual(Cell a, Cell b) { - return a.getRowLength() == b.getRowLength(); - } - - /*********************common prefixes*************************/ - private static int compare(byte[] left, int leftOffset, int leftLength, byte[] right, + @Override + public int compare(byte[] left, int leftOffset, int leftLength, byte[] right, int rightOffset, int rightLength) { - return Bytes.compareTo(left, leftOffset, leftLength, right, rightOffset, rightLength); + return CellUtil.compare(left, leftOffset, leftLength, right, rightOffset, rightLength); } - public static int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) { + public int compareCommonRowPrefix(Cell left, Cell right, int rowCommonPrefix) { return compare(left.getRowArray(), left.getRowOffset() + rowCommonPrefix, left.getRowLength() - rowCommonPrefix, right.getRowArray(), right.getRowOffset() + rowCommonPrefix, right.getRowLength() - rowCommonPrefix); } - public static int compareCommonFamilyPrefix(Cell left, Cell right, + public int compareCommonFamilyPrefix(Cell left, Cell right, int familyCommonPrefix) { return compare(left.getFamilyArray(), left.getFamilyOffset() + familyCommonPrefix, left.getFamilyLength() - familyCommonPrefix, right.getFamilyArray(), @@ -306,7 +206,7 @@ public class CellComparator implements Comparator, Serializable{ right.getFamilyLength() - familyCommonPrefix); } - public static int compareCommonQualifierPrefix(Cell left, Cell right, + public int compareCommonQualifierPrefix(Cell left, Cell right, int qualCommonPrefix) { return compare(left.getQualifierArray(), left.getQualifierOffset() + qualCommonPrefix, left.getQualifierLength() - qualCommonPrefix, right.getQualifierArray(), @@ -314,43 +214,68 @@ public class CellComparator implements Comparator, Serializable{ - qualCommonPrefix); } - /***************** special cases ****************************/ /** - * special case for KeyValue.equals + * Compares the row of two keyvalues for equality + * @param left + * @param right + * @return True if rows match. 
*/ - public static boolean equalsIgnoreMvccVersion(Cell a, Cell b){ - return 0 == compareStaticIgnoreMvccVersion(a, b); + public boolean matchingRows(final Cell left, final Cell right) { + short lrowlength = left.getRowLength(); + short rrowlength = right.getRowLength(); + return matchingRows(left, lrowlength, right, rrowlength); } - private static int compareStaticIgnoreMvccVersion(Cell a, Cell b) { - // row - int c = compareRows(a, b); - if (c != 0) return c; - - // family - c = compareColumns(a, b); - if (c != 0) return c; - - // timestamp: later sorts first - c = compareTimestamps(a, b); - if (c != 0) return c; + /** + * @param left + * @param lrowlength + * @param right + * @param rrowlength + * @return True if rows match. + */ + public boolean matchingRows(final Cell left, final short lrowlength, + final Cell right, final short rrowlength) { + return lrowlength == rrowlength && + matchingRows(left.getRowArray(), left.getRowOffset(), lrowlength, + right.getRowArray(), right.getRowOffset(), rrowlength); + } - //type - c = (0xff & b.getTypeByte()) - (0xff & a.getTypeByte()); - return c; + /** + * Compare rows. Just calls Bytes.equals, but it's good to have this encapsulated. + * @param left Left row array. + * @param loffset Left row offset. + * @param llength Left row length. + * @param right Right row array. + * @param roffset Right row offset. + * @param rlength Right row length. + * @return Whether rows are the same row. + */ + public boolean matchingRows(final byte [] left, final int loffset, final int llength, + final byte [] right, final int roffset, final int rlength) { + return Bytes.equals(left, loffset, llength, right, roffset, rlength); } + + /** + * Compares the row and column of two keyvalues for equality + * @param left + * @param right + * @return True if same row and column. + */ + public boolean matchingRowColumn(final Cell left, + final Cell right) { + short lrowlength = left.getRowLength(); + short rrowlength = right.getRowLength(); + + // TsOffset = end of column data. just comparing Row+CF length of each + if ((left.getRowLength() + left.getFamilyLength() + left.getQualifierLength()) != (right + .getRowLength() + right.getFamilyLength() + right.getQualifierLength())) { + return false; + } - private static int compareTimestamps(final long ltimestamp, final long rtimestamp) { - // The below older timestamps sorting ahead of newer timestamps looks - // wrong but it is intentional. This way, newer timestamps are first - // found when we iterate over a memstore and newer versions are the - // first we trip over when reading from a store file. 
- if (ltimestamp < rtimestamp) { - return 1; - } else if (ltimestamp > rtimestamp) { - return -1; + if (!matchingRows(left, lrowlength, right, rrowlength)) { + return false; } - return 0; + return 0 == compareColumns(left, right); } /** diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index fd7d252..03d9992 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -530,4 +530,194 @@ public final class CellUtil { + SettableSequenceId.class.getName())); } } + /***************** special cases ****************************/ + + /** + * special case for KeyValue.equals + */ + public static boolean equalsIgnoreMvccVersion(Cell a, Cell b){ + return 0 == compareStaticIgnoreMvccVersion(a, b); + } + + public static int compareStaticIgnoreMvccVersion(Cell a, Cell b) { + // row + int c = Bytes.compareTo(a.getRowArray(), a.getRowOffset(), a.getRowLength(), + b.getRowArray(), b.getRowOffset(), b.getRowLength()); + if (c != 0) return c; + + // family + c = compareColumns(a, b); + if (c != 0) return c; + + // timestamp: later sorts first + c = compareTimestamps(a, b); + if (c != 0) return c; + + //type + c = (0xff & b.getTypeByte()) - (0xff & a.getTypeByte()); + return c; + } + + public static int compareColumns(final Cell left, final Cell right) { + int lfoffset = left.getFamilyOffset(); + int rfoffset = right.getFamilyOffset(); + int lclength = left.getQualifierLength(); + int rclength = right.getQualifierLength(); + int lfamilylength = left.getFamilyLength(); + int rfamilylength = right.getFamilyLength(); + int diff = compare(left.getFamilyArray(), lfoffset, lfamilylength, right.getFamilyArray(), + rfoffset, rfamilylength); + if (diff != 0) { + return diff; + } else { + return compare(left.getQualifierArray(), left.getQualifierOffset(), lclength, + right.getQualifierArray(), right.getQualifierOffset(), rclength); + } + } + + public static int compare(byte[] left, int leftOffset, int leftLength, byte[] right, + int rightOffset, int rightLength) { + return Bytes.compareTo(left, leftOffset, leftLength, right, rightOffset, rightLength); + } + + public static int compareTimestamps(final Cell left, final Cell right) { + long ltimestamp = left.getTimestamp(); + long rtimestamp = right.getTimestamp(); + return compareTimestamps(ltimestamp, rtimestamp); + } + + public static int compareTimestamps(final long ltimestamp, final long rtimestamp) { + // The below older timestamps sorting ahead of newer timestamps looks + // wrong but it is intentional. This way, newer timestamps are first + // found when we iterate over a memstore and newer versions are the + // first we trip over when reading from a store file. 
+ if (ltimestamp < rtimestamp) { + return 1; + } else if (ltimestamp > rtimestamp) { + return -1; + } + return 0; + } + + + /** + * Create a new KeyValue by copying existing cell and adding new tags + * @param c + * @param newTags + * @return a new KeyValue instance with new tags + */ + public static Cell cloneAndAddTags(Cell c, List newTags) { + List existingTags = null; + if(c.getTagsLength() > 0) { + existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); + existingTags.addAll(newTags); + } else { + existingTags = newTags; + } + return new KeyValue(c.getRowArray(), c.getRowOffset(), c.getRowLength(), + c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(), + c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(), + c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(), + c.getValueLength(), existingTags); + } + + /******************** lengths *************************/ + + public static boolean areKeyLengthsEqual(Cell a, Cell b) { + return a.getRowLength() == b.getRowLength() && a.getFamilyLength() == b.getFamilyLength() + && a.getQualifierLength() == b.getQualifierLength(); + } + + public static boolean areRowLengthsEqual(Cell a, Cell b) { + return a.getRowLength() == b.getRowLength(); + } + + /********************* hashCode ************************/ + + /** + * Returns a hash code that is always the same for two Cells having a matching + * equals(..) result. Currently does not guard against nulls, but it could if + * necessary. + */ + public static int hashCode(Cell cell) { + if (cell == null) {// return 0 for empty Cell + return 0; + } + + // pre-calculate the 3 hashes made of byte ranges + int rowHash = Bytes.hashCode(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); + int familyHash = Bytes.hashCode(cell.getFamilyArray(), cell.getFamilyOffset(), + cell.getFamilyLength()); + int qualifierHash = Bytes.hashCode(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength()); + + // combine the 6 sub-hashes + int hash = 31 * rowHash + familyHash; + hash = 31 * hash + qualifierHash; + hash = 31 * hash + (int) cell.getTimestamp(); + hash = 31 * hash + cell.getTypeByte(); + hash = 31 * hash + (int) cell.getMvccVersion(); + return hash; + } + + public static int findCommonPrefixInRowPart(Cell left, Cell right, int rowCommonPrefix) { + return findCommonPrefix(left.getRowArray(), right.getRowArray(), left.getRowLength() + - rowCommonPrefix, right.getRowLength() - rowCommonPrefix, left.getRowOffset() + + rowCommonPrefix, right.getRowOffset() + rowCommonPrefix); + } + + public static int findCommonPrefixInFamilyPart(Cell left, Cell right, int familyCommonPrefix) { + return findCommonPrefix(left.getFamilyArray(), right.getFamilyArray(), left.getFamilyLength() + - familyCommonPrefix, right.getFamilyLength() - familyCommonPrefix, left.getFamilyOffset() + + familyCommonPrefix, right.getFamilyOffset() + familyCommonPrefix); + } + + public static int findCommonPrefixInQualifierPart(Cell left, Cell right, int qualifierCommonPrefix) { + return findCommonPrefix(left.getQualifierArray(), right.getQualifierArray(), + left.getQualifierLength() - qualifierCommonPrefix, right.getQualifierLength() + - qualifierCommonPrefix, left.getQualifierOffset() + qualifierCommonPrefix, + right.getQualifierOffset() + qualifierCommonPrefix); + } + + private static int findCommonPrefix(byte[] left, byte[] right, int leftLength, int rightLength, + int leftOffset, int rightOffset) { + int length = 
Math.min(leftLength, rightLength); + int result = 0; + + while (result < length && left[leftOffset + result] == right[rightOffset + result]) { + result++; + } + return result; + } + + /**************** equals ****************************/ + + public static boolean equals(Cell a, Cell b) { + return equalsRow(a, b) && equalsFamily(a, b) && equalsQualifier(a, b) && equalsTimestamp(a, b) + && equalsType(a, b); + } + + public static boolean equalsRow(Cell a, Cell b) { + return Bytes.equals(a.getRowArray(), a.getRowOffset(), a.getRowLength(), b.getRowArray(), + b.getRowOffset(), b.getRowLength()); + } + + public static boolean equalsFamily(Cell a, Cell b) { + return Bytes.equals(a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(), + b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength()); + } + + public static boolean equalsQualifier(Cell a, Cell b) { + return Bytes.equals(a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(), + b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength()); + } + + public static boolean equalsTimestamp(Cell a, Cell b) { + return a.getTimestamp() == b.getTimestamp(); + } + + public static boolean equalsType(Cell a, Cell b) { + return a.getTypeByte() == b.getTypeByte(); + } + } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index e75147b..3ee74ac 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -21,11 +21,6 @@ package org.apache.hadoop.hbase; import static org.apache.hadoop.hbase.util.Bytes.len; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -40,8 +35,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.io.RawComparator; import com.google.common.annotations.VisibleForTesting; @@ -95,12 +88,12 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion * of KeyValue only. */ - public static final KVComparator COMPARATOR = new KVComparator(); + public static final CellComparator COMPARATOR = new KVComparator(); /** * A {@link KVComparator} for hbase:meta catalog table * {@link KeyValue}s. */ - public static final KVComparator META_COMPARATOR = new MetaComparator(); + public static final CellComparator META_COMPARATOR = new MetaComparator(); /** * Needed for Bloom Filters. @@ -1058,7 +1051,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { if (!(other instanceof Cell)) { return false; } - return CellComparator.equals(this, (Cell)other); + return CellUtil.equals(this, (Cell)other); } @Override @@ -1711,7 +1704,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { if (c != 0) { return c; } - return CellComparator.compareWithoutRow(left, right); + return compareWithoutRow(left, right); } @Override @@ -1753,7 +1746,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { // Now compare middlesection of row. lpart = (leftFarDelimiter < 0 ? 
llength + loffset: leftFarDelimiter) - leftDelimiter; rpart = (rightFarDelimiter < 0 ? rlength + roffset: rightFarDelimiter)- rightDelimiter; - result = super.compareRows(left, leftDelimiter, lpart, right, rightDelimiter, rpart); + result = compareRows(left, leftDelimiter, lpart, right, rightDelimiter, rpart); if (result != 0) { return result; } else { @@ -1800,7 +1793,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { /** * Override the row key comparison to parse and compare the meta row key parts. */ - @Override protected int compareRowKey(final Cell l, final Cell r) { byte[] left = l.getRowArray(); int loffset = l.getRowOffset(); @@ -1817,7 +1809,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { * portion. This means two KeyValues with same Key but different Values are * considered the same as far as this Comparator is concerned. */ - public static class KVComparator implements RawComparator, SamePrefixComparator { + public static class KVComparator extends CellComparator { /** * The HFileV2 file format's trailer contains this class name. We reinterpret this and @@ -1829,357 +1821,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { return "org.apache.hadoop.hbase.KeyValue$KeyComparator"; } - @Override // RawComparator - public int compare(byte[] l, int loff, int llen, byte[] r, int roff, int rlen) { - return compareFlatKey(l,loff,llen, r,roff,rlen); - } - - - /** - * Compares the only the user specified portion of a Key. This is overridden by MetaComparator. - * @param left - * @param right - * @return 0 if equal, <0 if left smaller, >0 if right smaller - */ - protected int compareRowKey(final Cell left, final Cell right) { - return CellComparator.compareRows(left, right); - } - - /** - * Compares left to right assuming that left,loffset,llength and right,roffset,rlength are - * full KVs laid out in a flat byte[]s. - * @param left - * @param loffset - * @param llength - * @param right - * @param roffset - * @param rlength - * @return 0 if equal, <0 if left smaller, >0 if right smaller - */ - public int compareFlatKey(byte[] left, int loffset, int llength, - byte[] right, int roffset, int rlength) { - // Compare row - short lrowlength = Bytes.toShort(left, loffset); - short rrowlength = Bytes.toShort(right, roffset); - int compare = compareRows(left, loffset + Bytes.SIZEOF_SHORT, - lrowlength, right, roffset + Bytes.SIZEOF_SHORT, rrowlength); - if (compare != 0) { - return compare; - } - - // Compare the rest of the two KVs without making any assumptions about - // the common prefix. This function will not compare rows anyway, so we - // don't need to tell it that the common prefix includes the row. 
- return compareWithoutRow(0, left, loffset, llength, right, roffset, - rlength, rrowlength); - } - - public int compareFlatKey(byte[] left, byte[] right) { - return compareFlatKey(left, 0, left.length, right, 0, right.length); - } - - public int compareOnlyKeyPortion(Cell left, Cell right) { - return CellComparator.compareStatic(left, right, true); - } - - /** - * Compares the Key of a cell -- with fields being more significant in this order: - * rowkey, colfam/qual, timestamp, type, mvcc - */ - @Override - public int compare(final Cell left, final Cell right) { - int compare = CellComparator.compareStatic(left, right, false); - return compare; - } - - public int compareTimestamps(final Cell left, final Cell right) { - return CellComparator.compareTimestamps(left, right); - } - - /** - * @param left - * @param right - * @return Result comparing rows. - */ - public int compareRows(final Cell left, final Cell right) { - return compareRows(left.getRowArray(),left.getRowOffset(), left.getRowLength(), - right.getRowArray(), right.getRowOffset(), right.getRowLength()); - } - - /** - * Get the b[],o,l for left and right rowkey portions and compare. - * @param left - * @param loffset - * @param llength - * @param right - * @param roffset - * @param rlength - * @return 0 if equal, <0 if left smaller, >0 if right smaller - */ - public int compareRows(byte [] left, int loffset, int llength, - byte [] right, int roffset, int rlength) { - return Bytes.compareTo(left, loffset, llength, right, roffset, rlength); - } - - int compareColumns(final Cell left, final short lrowlength, final Cell right, - final short rrowlength) { - return CellComparator.compareColumns(left, right); - } - - protected int compareColumns( - byte [] left, int loffset, int llength, final int lfamilylength, - byte [] right, int roffset, int rlength, final int rfamilylength) { - // Compare family portion first. - int diff = Bytes.compareTo(left, loffset, lfamilylength, - right, roffset, rfamilylength); - if (diff != 0) { - return diff; - } - // Compare qualifier portion - return Bytes.compareTo(left, loffset + lfamilylength, - llength - lfamilylength, - right, roffset + rfamilylength, rlength - rfamilylength); - } - - static int compareTimestamps(final long ltimestamp, final long rtimestamp) { - // The below older timestamps sorting ahead of newer timestamps looks - // wrong but it is intentional. This way, newer timestamps are first - // found when we iterate over a memstore and newer versions are the - // first we trip over when reading from a store file. 
- if (ltimestamp < rtimestamp) { - return 1; - } else if (ltimestamp > rtimestamp) { - return -1; - } - return 0; - } - - /** - * Overridden - * @param commonPrefix - * @param left - * @param loffset - * @param llength - * @param right - * @param roffset - * @param rlength - * @return 0 if equal, <0 if left smaller, >0 if right smaller - */ - @Override // SamePrefixComparator - public int compareIgnoringPrefix(int commonPrefix, byte[] left, - int loffset, int llength, byte[] right, int roffset, int rlength) { - // Compare row - short lrowlength = Bytes.toShort(left, loffset); - short rrowlength; - - int comparisonResult = 0; - if (commonPrefix < ROW_LENGTH_SIZE) { - // almost nothing in common - rrowlength = Bytes.toShort(right, roffset); - comparisonResult = compareRows(left, loffset + ROW_LENGTH_SIZE, - lrowlength, right, roffset + ROW_LENGTH_SIZE, rrowlength); - } else { // the row length is the same - rrowlength = lrowlength; - if (commonPrefix < ROW_LENGTH_SIZE + rrowlength) { - // The rows are not the same. Exclude the common prefix and compare - // the rest of the two rows. - int common = commonPrefix - ROW_LENGTH_SIZE; - comparisonResult = compareRows( - left, loffset + common + ROW_LENGTH_SIZE, lrowlength - common, - right, roffset + common + ROW_LENGTH_SIZE, rrowlength - common); - } - } - if (comparisonResult != 0) { - return comparisonResult; - } - - assert lrowlength == rrowlength; - return compareWithoutRow(commonPrefix, left, loffset, llength, right, - roffset, rlength, lrowlength); - } - - /** - * Compare columnFamily, qualifier, timestamp, and key type (everything - * except the row). This method is used both in the normal comparator and - * the "same-prefix" comparator. Note that we are assuming that row portions - * of both KVs have already been parsed and found identical, and we don't - * validate that assumption here. - * @param commonPrefix - * the length of the common prefix of the two key-values being - * compared, including row length and row - */ - private int compareWithoutRow(int commonPrefix, byte[] left, int loffset, - int llength, byte[] right, int roffset, int rlength, short rowlength) { - /*** - * KeyValue Format and commonLength: - * |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|.... - * ------------------|-------commonLength--------|-------------- - */ - int commonLength = ROW_LENGTH_SIZE + FAMILY_LENGTH_SIZE + rowlength; - - // commonLength + TIMESTAMP_TYPE_SIZE - int commonLengthWithTSAndType = TIMESTAMP_TYPE_SIZE + commonLength; - // ColumnFamily + Qualifier length. - int lcolumnlength = llength - commonLengthWithTSAndType; - int rcolumnlength = rlength - commonLengthWithTSAndType; - - byte ltype = left[loffset + (llength - 1)]; - byte rtype = right[roffset + (rlength - 1)]; - - // If the column is not specified, the "minimum" key type appears the - // latest in the sorted order, regardless of the timestamp. This is used - // for specifying the last key/value in a given row, because there is no - // "lexicographically last column" (it would be infinitely long). The - // "maximum" key type does not need this behavior. - if (lcolumnlength == 0 && ltype == Type.Minimum.getCode()) { - // left is "bigger", i.e. it appears later in the sorted order - return 1; - } - if (rcolumnlength == 0 && rtype == Type.Minimum.getCode()) { - return -1; - } - - int lfamilyoffset = commonLength + loffset; - int rfamilyoffset = commonLength + roffset; - - // Column family length. 
- int lfamilylength = left[lfamilyoffset - 1]; - int rfamilylength = right[rfamilyoffset - 1]; - // If left family size is not equal to right family size, we need not - // compare the qualifiers. - boolean sameFamilySize = (lfamilylength == rfamilylength); - int common = 0; - if (commonPrefix > 0) { - common = Math.max(0, commonPrefix - commonLength); - if (!sameFamilySize) { - // Common should not be larger than Math.min(lfamilylength, - // rfamilylength). - common = Math.min(common, Math.min(lfamilylength, rfamilylength)); - } else { - common = Math.min(common, Math.min(lcolumnlength, rcolumnlength)); - } - } - if (!sameFamilySize) { - // comparing column family is enough. - return Bytes.compareTo(left, lfamilyoffset + common, lfamilylength - - common, right, rfamilyoffset + common, rfamilylength - common); - } - // Compare family & qualifier together. - final int comparison = Bytes.compareTo(left, lfamilyoffset + common, - lcolumnlength - common, right, rfamilyoffset + common, - rcolumnlength - common); - if (comparison != 0) { - return comparison; - } - - //// - // Next compare timestamps. - long ltimestamp = Bytes.toLong(left, - loffset + (llength - TIMESTAMP_TYPE_SIZE)); - long rtimestamp = Bytes.toLong(right, - roffset + (rlength - TIMESTAMP_TYPE_SIZE)); - int compare = compareTimestamps(ltimestamp, rtimestamp); - if (compare != 0) { - return compare; - } - - // Compare types. Let the delete types sort ahead of puts; i.e. types - // of higher numbers sort before those of lesser numbers. Maximum (255) - // appears ahead of everything, and minimum (0) appears after - // everything. - return (0xff & rtype) - (0xff & ltype); - } - - protected int compareFamilies(final byte[] left, final int loffset, final int lfamilylength, - final byte[] right, final int roffset, final int rfamilylength) { - int diff = Bytes.compareTo(left, loffset, lfamilylength, right, roffset, rfamilylength); - return diff; - } - - protected int compareColumns(final byte[] left, final int loffset, final int lquallength, - final byte[] right, final int roffset, final int rquallength) { - int diff = Bytes.compareTo(left, loffset, lquallength, right, roffset, rquallength); - return diff; - } - /** - * Compares the row and column of two keyvalues for equality - * @param left - * @param right - * @return True if same row and column. - */ - public boolean matchingRowColumn(final Cell left, - final Cell right) { - short lrowlength = left.getRowLength(); - short rrowlength = right.getRowLength(); - - // TsOffset = end of column data. 
just comparing Row+CF length of each - if ((left.getRowLength() + left.getFamilyLength() + left.getQualifierLength()) != (right - .getRowLength() + right.getFamilyLength() + right.getQualifierLength())) { - return false; - } - - if (!matchingRows(left, lrowlength, right, rrowlength)) { - return false; - } - - int lfoffset = left.getFamilyOffset(); - int rfoffset = right.getFamilyOffset(); - int lclength = left.getQualifierLength(); - int rclength = right.getQualifierLength(); - int lfamilylength = left.getFamilyLength(); - int rfamilylength = right.getFamilyLength(); - int diff = compareFamilies(left.getFamilyArray(), lfoffset, lfamilylength, - right.getFamilyArray(), rfoffset, rfamilylength); - if (diff != 0) { - return false; - } else { - diff = compareColumns(left.getQualifierArray(), left.getQualifierOffset(), lclength, - right.getQualifierArray(), right.getQualifierOffset(), rclength); - return diff == 0; - } - } - - /** - * Compares the row of two keyvalues for equality - * @param left - * @param right - * @return True if rows match. - */ - public boolean matchingRows(final Cell left, final Cell right) { - short lrowlength = left.getRowLength(); - short rrowlength = right.getRowLength(); - return matchingRows(left, lrowlength, right, rrowlength); - } - - /** - * @param left - * @param lrowlength - * @param right - * @param rrowlength - * @return True if rows match. - */ - private boolean matchingRows(final Cell left, final short lrowlength, - final Cell right, final short rrowlength) { - return lrowlength == rrowlength && - matchingRows(left.getRowArray(), left.getRowOffset(), lrowlength, - right.getRowArray(), right.getRowOffset(), rrowlength); - } - - /** - * Compare rows. Just calls Bytes.equals, but it's good to have this encapsulated. - * @param left Left row array. - * @param loffset Left row offset. - * @param llength Left row length. - * @param right Right row array. - * @param roffset Right row offset. - * @param rlength Right row length. - * @return Whether rows are the same row. - */ - public boolean matchingRows(final byte [] left, final int loffset, final int llength, - final byte [] right, final int roffset, final int rlength) { - return Bytes.equals(left, loffset, llength, right, roffset, rlength); - } - public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) { byte[] fakeKey = getShortMidpointKey(lastKeyOfPreviousBlock, firstKeyInBlock); if (compareFlatKey(fakeKey, firstKeyInBlock) > 0) { @@ -2271,190 +1912,15 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { protected Object clone() throws CloneNotSupportedException { return new KVComparator(); } - - } - - /** - * @param b - * @return A KeyValue made of a byte array that holds the key-only part. - * Needed to convert hfile index members to KeyValues. - */ - public static KeyValue createKeyValueFromKey(final byte [] b) { - return createKeyValueFromKey(b, 0, b.length); - } - - /** - * @param bb - * @return A KeyValue made of a byte buffer that holds the key-only part. - * Needed to convert hfile index members to KeyValues. - */ - public static KeyValue createKeyValueFromKey(final ByteBuffer bb) { - return createKeyValueFromKey(bb.array(), bb.arrayOffset(), bb.limit()); - } - - /** - * @param b - * @param o - * @param l - * @return A KeyValue made of a byte array that holds the key-only part. - * Needed to convert hfile index members to KeyValues. 
- */ - public static KeyValue createKeyValueFromKey(final byte [] b, final int o, - final int l) { - byte [] newb = new byte[l + ROW_OFFSET]; - System.arraycopy(b, o, newb, ROW_OFFSET, l); - Bytes.putInt(newb, 0, l); - Bytes.putInt(newb, Bytes.SIZEOF_INT, 0); - return new KeyValue(newb); - } - - /** - * @param in Where to read bytes from. Creates a byte array to hold the KeyValue - * backing bytes copied from the steam. - * @return KeyValue created by deserializing from in OR if we find a length - * of zero, we will return null which can be useful marking a stream as done. - * @throws IOException - */ - public static KeyValue create(final DataInput in) throws IOException { - return create(in.readInt(), in); - } - - /** - * Create a KeyValue reading length from in - * @param length - * @param in - * @return Created KeyValue OR if we find a length of zero, we will return null which - * can be useful marking a stream as done. - * @throws IOException - */ - public static KeyValue create(int length, final DataInput in) throws IOException { - - if (length <= 0) { - if (length == 0) return null; - throw new IOException("Failed read " + length + " bytes, stream corrupt?"); - } - - // This is how the old Writables.readFrom used to deserialize. Didn't even vint. - byte [] bytes = new byte[length]; - in.readFully(bytes); - return new KeyValue(bytes, 0, length); } /** - * Create a new KeyValue by copying existing cell and adding new tags - * @param c - * @param newTags - * @return a new KeyValue instance with new tags - */ - public static KeyValue cloneAndAddTags(Cell c, List newTags) { - List existingTags = null; - if(c.getTagsLength() > 0) { - existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); - existingTags.addAll(newTags); - } else { - existingTags = newTags; - } - return new KeyValue(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(), - c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(), - c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(), - c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(), - c.getValueLength(), existingTags); - } - - /** - * Create a KeyValue reading from the raw InputStream. - * Named iscreate so doesn't clash with {@link #create(DataInput)} - * @param in - * @return Created KeyValue OR if we find a length of zero, we will return null which - * can be useful marking a stream as done. - * @throws IOException - */ - public static KeyValue iscreate(final InputStream in) throws IOException { - byte [] intBytes = new byte[Bytes.SIZEOF_INT]; - int bytesRead = 0; - while (bytesRead < intBytes.length) { - int n = in.read(intBytes, bytesRead, intBytes.length - bytesRead); - if (n < 0) { - if (bytesRead == 0) return null; // EOF at start is ok - throw new IOException("Failed read of int, read " + bytesRead + " bytes"); - } - bytesRead += n; - } - // TODO: perhaps some sanity check is needed here. - byte [] bytes = new byte[Bytes.toInt(intBytes)]; - IOUtils.readFully(in, bytes, 0, bytes.length); - return new KeyValue(bytes, 0, bytes.length); - } - - /** - * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable. - * @param kv - * @param out - * @return Length written on stream - * @throws IOException - * @see #create(DataInput) for the inverse function - */ - public static long write(final KeyValue kv, final DataOutput out) throws IOException { - // This is how the old Writables write used to serialize KVs. 
Need to figure way to make it - // work for all implementations. - int length = kv.getLength(); - out.writeInt(length); - out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; - } - - /** - * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable but do - * not require a {@link DataOutput}, just take plain {@link OutputStream} - * Named oswrite so does not clash with {@link #write(KeyValue, DataOutput)} - * @param kv - * @param out - * @return Length written on stream - * @throws IOException - * @see #create(DataInput) for the inverse function - * @see #write(KeyValue, DataOutput) - */ - @Deprecated - public static long oswrite(final KeyValue kv, final OutputStream out) - throws IOException { - int length = kv.getLength(); - // This does same as DataOuput#writeInt (big-endian, etc.) - out.write(Bytes.toBytes(length)); - out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; - } - - /** - * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable but do - * not require a {@link DataOutput}, just take plain {@link OutputStream} - * Named oswrite so does not clash with {@link #write(KeyValue, DataOutput)} - * @param kv - * @param out - * @param withTags - * @return Length written on stream - * @throws IOException - * @see #create(DataInput) for the inverse function - * @see #write(KeyValue, DataOutput) - */ - public static long oswrite(final KeyValue kv, final OutputStream out, final boolean withTags) - throws IOException { - int length = kv.getLength(); - if (!withTags) { - length = kv.getKeyLength() + kv.getValueLength() + KEYVALUE_INFRASTRUCTURE_SIZE; - } - // This does same as DataOuput#writeInt (big-endian, etc.) - out.write(Bytes.toBytes(length)); - out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; - } - - /** * Comparator that compares row component only of a KeyValue. */ public static class RowOnlyComparator implements Comparator { - final KVComparator comparator; + final CellComparator comparator; - public RowOnlyComparator(final KVComparator c) { + public RowOnlyComparator(final CellComparator c) { this.comparator = c; } @@ -2463,22 +1929,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { } } - - /** - * Avoids redundant comparisons for better performance. - * - * TODO get rid of this wart - */ - public interface SamePrefixComparator { - /** - * Compare two keys assuming that the first n bytes are the same. - * @param commonPrefix How many bytes are the same. - */ - int compareIgnoringPrefix( - int commonPrefix, byte[] left, int loffset, int llength, byte[] right, int roffset, int rlength - ); - } - /** * This is a TEST only Comparator used in TestSeekTo and TestReseekTo. 
*/ @@ -2529,6 +1979,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId { return (0xff & left.getTypeByte()) - (0xff & right.getTypeByte()); } + @Override public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) { return firstKeyInBlock; } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java index 6c9fa71..05cded2 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java @@ -85,7 +85,7 @@ public class KeyValueTestUtil { for (Cell kv1 : kvCollection1) { boolean found = false; for (Cell kv2 : kvCollection2) { - if (CellComparator.equalsIgnoreMvccVersion(kv1, kv2)) found = true; + if (CellUtil.equalsIgnoreMvccVersion(kv1, kv2)) found = true; } if (!found) return false; } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 02b1b0d..b894e85 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -18,6 +18,11 @@ package org.apache.hadoop.hbase; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; @@ -28,8 +33,10 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.IterableUtils; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.WritableUtils; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.Lists; @@ -514,4 +521,298 @@ public class KeyValueUtil { return new ArrayList(lazyList); } + /** + * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable but do + * not require a {@link DataOutput}, just take plain {@link OutputStream} + * Named oswrite so does not clash with {@link #write(KeyValue, DataOutput)} + * @param kv + * @param out + * @param withTags + * @return Length written on stream + * @throws IOException + * @see #create(DataInput) for the inverse function + * @see #write(KeyValue, DataOutput) + */ + public static long oswrite(final KeyValue kv, final OutputStream out, final boolean withTags) + throws IOException { + int length = kv.getLength(); + if (!withTags) { + length = kv.getKeyLength() + kv.getValueLength() + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE; + } + // This does same as DataOuput#writeInt (big-endian, etc.) 
+ out.write(Bytes.toBytes(length)); + out.write(kv.getBuffer(), kv.getOffset(), length); + return length + Bytes.SIZEOF_INT; + } + + /** + * Write out a KeyValue in the manner in which we used to when KeyValue was a Writable but do + * not require a {@link DataOutput}, just take plain {@link OutputStream} + * Named oswrite so does not clash with {@link #write(KeyValue, DataOutput)} + * @param kv + * @param out + * @return Length written on stream + * @throws IOException + * @see #create(DataInput) for the inverse function + * @see #write(KeyValue, DataOutput) + */ + @Deprecated + public static long oswrite(final KeyValue kv, final OutputStream out) + throws IOException { + int length = kv.getLength(); + // This does same as DataOuput#writeInt (big-endian, etc.) + out.write(Bytes.toBytes(length)); + out.write(kv.getBuffer(), kv.getOffset(), length); + return length + Bytes.SIZEOF_INT; + } + + /** + * Write out a KeyValue in the manner in which we used to when KeyValue was a + * Writable. + * + * @param kv + * @param out + * @return Length written on stream + * @throws IOException + * @see #create(DataInput) for the inverse function + */ + public static long write(final KeyValue kv, final DataOutput out) throws IOException { + // This is how the old Writables write used to serialize KVs. Need to figure + // way to make it + // work for all implementations. + int length = kv.getLength(); + out.writeInt(length); + out.write(kv.getBuffer(), kv.getOffset(), length); + return length + Bytes.SIZEOF_INT; + } + + /** + * Create a KeyValue reading from the raw InputStream. Named + * iscreate so doesn't clash with {@link #create(DataInput)} + * + * @param in + * @return Created KeyValue OR if we find a length of zero, we will return + * null which can be useful marking a stream as done. + * @throws IOException + */ + public static KeyValue iscreate(final InputStream in) throws IOException { + byte[] intBytes = new byte[Bytes.SIZEOF_INT]; + int bytesRead = 0; + while (bytesRead < intBytes.length) { + int n = in.read(intBytes, bytesRead, intBytes.length - bytesRead); + if (n < 0) { + if (bytesRead == 0) + return null; // EOF at start is ok + throw new IOException("Failed read of int, read " + bytesRead + " bytes"); + } + bytesRead += n; + } + // TODO: perhaps some sanity check is needed here. + byte[] bytes = new byte[Bytes.toInt(intBytes)]; + IOUtils.readFully(in, bytes, 0, bytes.length); + return new KeyValue(bytes, 0, bytes.length); + } + + /** + * @param commonPrefix + * @param left + * @param loffset + * @param llength + * @param right + * @param roffset + * @param rlength + * @return 0 if equal, <0 if left smaller, >0 if right smaller + */ + @VisibleForTesting + // SamePrefixComparator + public static int compareIgnoringPrefix(int commonPrefix, byte[] left, + int loffset, int llength, byte[] right, int roffset, int rlength) { + // Compare row + short lrowlength = Bytes.toShort(left, loffset); + short rrowlength; + + int comparisonResult = 0; + if (commonPrefix < KeyValue.ROW_LENGTH_SIZE) { + // almost nothing in common + rrowlength = Bytes.toShort(right, roffset); + comparisonResult = CellComparator.CELL_COMPARATOR.compareRows(left, loffset + KeyValue.ROW_LENGTH_SIZE, + lrowlength, right, roffset + KeyValue.ROW_LENGTH_SIZE, rrowlength); + } else { // the row length is the same + rrowlength = lrowlength; + if (commonPrefix < KeyValue.ROW_LENGTH_SIZE + rrowlength) { + // The rows are not the same. Exclude the common prefix and compare + // the rest of the two rows. 
+ int common = commonPrefix - KeyValue.ROW_LENGTH_SIZE; + comparisonResult = CellComparator.CELL_COMPARATOR.compareRows( + left, loffset + common + KeyValue.ROW_LENGTH_SIZE, lrowlength - common, + right, roffset + common + KeyValue.ROW_LENGTH_SIZE, rrowlength - common); + } + } + if (comparisonResult != 0) { + return comparisonResult; + } + + assert lrowlength == rrowlength; + return compareWithoutRow(commonPrefix, left, loffset, llength, right, + roffset, rlength, lrowlength); + } + + /** + * Compare columnFamily, qualifier, timestamp, and key type (everything + * except the row). This method is used both in the normal comparator and + * the "same-prefix" comparator. Note that we are assuming that row portions + * of both KVs have already been parsed and found identical, and we don't + * validate that assumption here. + * @param commonPrefix + * the length of the common prefix of the two key-values being + * compared, including row length and row + */ + private static int compareWithoutRow(int commonPrefix, byte[] left, int loffset, + int llength, byte[] right, int roffset, int rlength, short rowlength) { + /*** + * KeyValue Format and commonLength: + * |_keyLen_|_valLen_|_rowLen_|_rowKey_|_famiLen_|_fami_|_Quali_|.... + * ------------------|-------commonLength--------|-------------- + */ + int commonLength = KeyValue.ROW_LENGTH_SIZE + KeyValue.FAMILY_LENGTH_SIZE + rowlength; + + // commonLength + TIMESTAMP_TYPE_SIZE + int commonLengthWithTSAndType = KeyValue.TIMESTAMP_TYPE_SIZE + commonLength; + // ColumnFamily + Qualifier length. + int lcolumnlength = llength - commonLengthWithTSAndType; + int rcolumnlength = rlength - commonLengthWithTSAndType; + + byte ltype = left[loffset + (llength - 1)]; + byte rtype = right[roffset + (rlength - 1)]; + + // If the column is not specified, the "minimum" key type appears the + // latest in the sorted order, regardless of the timestamp. This is used + // for specifying the last key/value in a given row, because there is no + // "lexicographically last column" (it would be infinitely long). The + // "maximum" key type does not need this behavior. + if (lcolumnlength == 0 && ltype == Type.Minimum.getCode()) { + // left is "bigger", i.e. it appears later in the sorted order + return 1; + } + if (rcolumnlength == 0 && rtype == Type.Minimum.getCode()) { + return -1; + } + + int lfamilyoffset = commonLength + loffset; + int rfamilyoffset = commonLength + roffset; + + // Column family length. + int lfamilylength = left[lfamilyoffset - 1]; + int rfamilylength = right[rfamilyoffset - 1]; + // If left family size is not equal to right family size, we need not + // compare the qualifiers. + boolean sameFamilySize = (lfamilylength == rfamilylength); + int common = 0; + if (commonPrefix > 0) { + common = Math.max(0, commonPrefix - commonLength); + if (!sameFamilySize) { + // Common should not be larger than Math.min(lfamilylength, + // rfamilylength). + common = Math.min(common, Math.min(lfamilylength, rfamilylength)); + } else { + common = Math.min(common, Math.min(lcolumnlength, rcolumnlength)); + } + } + if (!sameFamilySize) { + // comparing column family is enough. + return Bytes.compareTo(left, lfamilyoffset + common, lfamilylength + - common, right, rfamilyoffset + common, rfamilylength - common); + } + // Compare family & qualifier together. 
+ final int comparison = Bytes.compareTo(left, lfamilyoffset + common, + lcolumnlength - common, right, rfamilyoffset + common, + rcolumnlength - common); + if (comparison != 0) { + return comparison; + } + + //// + // Next compare timestamps. + long ltimestamp = Bytes.toLong(left, + loffset + (llength - KeyValue.TIMESTAMP_TYPE_SIZE)); + long rtimestamp = Bytes.toLong(right, + roffset + (rlength - KeyValue.TIMESTAMP_TYPE_SIZE)); + int compare = CellUtil.compareTimestamps(ltimestamp, rtimestamp); + if (compare != 0) { + return compare; + } + + // Compare types. Let the delete types sort ahead of puts; i.e. types + // of higher numbers sort before those of lesser numbers. Maximum (255) + // appears ahead of everything, and minimum (0) appears after + // everything. + return (0xff & rtype) - (0xff & ltype); + } + + /** + * @param b + * @return A KeyValue made of a byte array that holds the key-only part. + * Needed to convert hfile index members to KeyValues. + */ + public static KeyValue createKeyValueFromKey(final byte[] b) { + return createKeyValueFromKey(b, 0, b.length); + } + + /** + * @param bb + * @return A KeyValue made of a byte buffer that holds the key-only part. + * Needed to convert hfile index members to KeyValues. + */ + public static KeyValue createKeyValueFromKey(final ByteBuffer bb) { + return createKeyValueFromKey(bb.array(), bb.arrayOffset(), bb.limit()); + } + + /** + * @param b + * @param o + * @param l + * @return A KeyValue made of a byte array that holds the key-only part. + * Needed to convert hfile index members to KeyValues. + */ + public static KeyValue createKeyValueFromKey(final byte[] b, final int o, final int l) { + byte[] newb = new byte[l + KeyValue.ROW_OFFSET]; + System.arraycopy(b, o, newb, KeyValue.ROW_OFFSET, l); + Bytes.putInt(newb, 0, l); + Bytes.putInt(newb, Bytes.SIZEOF_INT, 0); + return new KeyValue(newb); + } + + + /** + * @param in Where to read bytes from. Creates a byte array to hold the KeyValue + * backing bytes copied from the stream. + * @return KeyValue created by deserializing from in OR, if we find a length + * of zero, we will return null, which can be useful for marking a stream as done. + * @throws IOException + */ + public static KeyValue create(final DataInput in) throws IOException { + return create(in.readInt(), in); + } + + /** + * Create a KeyValue, reading length bytes from in + * @param length + * @param in + * @return Created KeyValue OR, if we find a length of zero, we will return null, which + * can be useful for marking a stream as done. + * @throws IOException + */ + public static KeyValue create(int length, final DataInput in) throws IOException { + + if (length <= 0) { + if (length == 0) return null; + throw new IOException("Failed read " + length + " bytes, stream corrupt?"); + } + + // This is how the old Writables.readFrom used to deserialize. Didn't even vint.
+ byte [] bytes = new byte[length]; + in.readFully(bytes); + return new KeyValue(bytes, 0, length); + } + } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java index 900c85b..061a975 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java @@ -18,16 +18,15 @@ package org.apache.hadoop.hbase; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.hbase.KeyValue.KVComparator; -import org.apache.hadoop.hbase.util.Bytes; - import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; + /** * Immutable POJO class for representing a table name. * Which is of the form: @@ -500,7 +499,7 @@ public final class TableName implements Comparable { * * @return The comparator. */ - public KVComparator getRowComparator() { + public CellComparator getRowComparator() { if(TableName.META_TABLE_NAME.equals(this)) { return KeyValue.META_COMPARATOR; } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java index 2e67297..311de1c 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java @@ -56,7 +56,7 @@ public class KeyValueCodec implements Codec { // This is crass and will not work when KV changes. Also if passed a non-kv Cell, it will // make expensive copy. // Do not write tags over RPC - KeyValue.oswrite((KeyValue) KeyValueUtil.ensureKeyValue(cell), this.out, false); + KeyValueUtil.oswrite((KeyValue) KeyValueUtil.ensureKeyValue(cell), this.out, false); } } @@ -66,7 +66,7 @@ public class KeyValueCodec implements Codec { } protected Cell parseCell() throws IOException { - return KeyValue.iscreate(in); + return KeyValueUtil.iscreate(in); } } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java index 5b532e9..2aaf844 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java @@ -62,7 +62,7 @@ public class KeyValueCodecWithTags implements Codec { // This is crass and will not work when KV changes. Also if passed a non-kv Cell, it will // make expensive copy. 
// Write tags - KeyValue.oswrite((KeyValue) KeyValueUtil.ensureKeyValue(cell), this.out, true); + KeyValueUtil.oswrite((KeyValue) KeyValueUtil.ensureKeyValue(cell), this.out, true); } } @@ -72,7 +72,7 @@ public class KeyValueCodecWithTags implements Codec { } protected Cell parseCell() throws IOException { - return KeyValue.iscreate(in); + return KeyValueUtil.iscreate(in); } } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 4772358..cd36ace 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -24,13 +24,12 @@ import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.SettableSequenceId; -import org.apache.hadoop.hbase.KeyValue.KVComparator; -import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.SettableSequenceId; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.HFileContext; @@ -515,14 +514,14 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { BufferedEncodedSeeker implements EncodedSeeker { protected HFileBlockDecodingContext decodingCtx; - protected final KVComparator comparator; - protected final SamePrefixComparator samePrefixComparator; + protected final CellComparator comparator; + protected final CellComparator samePrefixComparator; protected ByteBuffer currentBuffer; protected STATE current = createSeekerState(); // always valid protected STATE previous = createSeekerState(); // may not be valid protected TagCompressionContext tagCompressionContext = null; - public BufferedEncodedSeeker(KVComparator comparator, + public BufferedEncodedSeeker(CellComparator comparator, HFileBlockDecodingContext decodingCtx) { this.comparator = comparator; this.samePrefixComparator = comparator; @@ -545,13 +544,12 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } @Override - public int compareKey(KVComparator comparator, byte[] key, int offset, int length) { - return comparator.compareFlatKey(key, offset, length, - current.keyBuffer, 0, current.keyLength); + public int compareKey(CellComparator comparator, byte[] key, int offset, int length) { + return compareKey(comparator, new KeyValue.KeyOnlyKeyValue(key, offset, length)); } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(CellComparator comparator, Cell key) { return comparator.compareOnlyKeyPortion(key, new KeyValue.KeyOnlyKeyValue(current.keyBuffer, 0, current.keyLength)); } @@ -698,9 +696,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { if (current.lastCommonPrefix <= 2) { rowCommonPrefix = 0; } - rowCommonPrefix += CellComparator.findCommonPrefixInRowPart(seekCell, currentCell, + rowCommonPrefix += CellUtil.findCommonPrefixInRowPart(seekCell, currentCell, rowCommonPrefix); - comp = CellComparator.compareCommonRowPrefix(seekCell, 
currentCell, rowCommonPrefix); + comp = samePrefixComparator.compareCommonRowPrefix(seekCell, currentCell, rowCommonPrefix); if (comp == 0) { comp = compareTypeBytes(seekCell, currentCell); if (comp == 0) { @@ -709,9 +707,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { 0, Math.min(familyCommonPrefix, current.lastCommonPrefix - (3 + currentCell.getRowLength()))); - familyCommonPrefix += CellComparator.findCommonPrefixInFamilyPart(seekCell, + familyCommonPrefix += CellUtil.findCommonPrefixInFamilyPart(seekCell, currentCell, familyCommonPrefix); - comp = CellComparator.compareCommonFamilyPrefix(seekCell, currentCell, + comp = samePrefixComparator.compareCommonFamilyPrefix(seekCell, currentCell, familyCommonPrefix); if (comp == 0) { // subtract the rowkey fixed length and the family key fixed @@ -722,12 +720,12 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { qualCommonPrefix, current.lastCommonPrefix - (3 + currentCell.getRowLength() + currentCell.getFamilyLength()))); - qualCommonPrefix += CellComparator.findCommonPrefixInQualifierPart(seekCell, + qualCommonPrefix += CellUtil.findCommonPrefixInQualifierPart(seekCell, currentCell, qualCommonPrefix); - comp = CellComparator.compareCommonQualifierPrefix(seekCell, currentCell, + comp = samePrefixComparator.compareCommonQualifierPrefix(seekCell, currentCell, qualCommonPrefix); if (comp == 0) { - comp = CellComparator.compareTimestamps(seekCell, currentCell); + comp = samePrefixComparator.compareTimestamps(seekCell, currentCell); if (comp == 0) { // Compare types. Let the delete types sort ahead of puts; // i.e. types diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java index f2bcae4..202723a 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; @@ -76,7 +76,7 @@ public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(KVComparator comparator, + public EncodedSeeker createSeeker(CellComparator comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { @Override diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java index 99f6a7f..c8e1970 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java @@ -23,8 +23,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.io.hfile.HFileContext; /** @@ 
-99,7 +99,7 @@ public interface DataBlockEncoder { * @param decodingCtx * @return A newly created seeker. */ - EncodedSeeker createSeeker(KVComparator comparator, + EncodedSeeker createSeeker(CellComparator comparator, HFileBlockDecodingContext decodingCtx); /** @@ -219,8 +219,8 @@ public interface DataBlockEncoder { * @param length * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater */ - public int compareKey(KVComparator comparator, byte[] key, int offset, int length); + public int compareKey(CellComparator comparator, byte[] key, int offset, int length); - public int compareKey(KVComparator comparator, Cell key); + public int compareKey(CellComparator comparator, Cell key); } } diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index fc4c314..1fe673d 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -364,7 +364,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(KVComparator comparator, + public EncodedSeeker createSeeker(CellComparator comparator, HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { private byte[] familyNameWithSize; diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index c133308..53b082a 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -388,7 +388,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(KVComparator comparator, + public EncodedSeeker createSeeker(CellComparator comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { private void decode(boolean isFirst) { diff --git hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java index a6db8ee..9e11207 100644 --- hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java +++ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; 
+import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -147,7 +147,7 @@ public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder { } @Override - public EncodedSeeker createSeeker(KVComparator comparator, + public EncodedSeeker createSeeker(CellComparator comparator, final HFileBlockDecodingContext decodingCtx) { return new BufferedEncodedSeeker(comparator, decodingCtx) { @Override diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index 39e788a..81f6d8e 100644 --- hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -38,39 +38,41 @@ public class TestCellComparator { byte[] qual2 = Bytes.toBytes("qual2"); byte[] val = Bytes.toBytes("val"); + + private CellComparator comparator = new CellComparator(); @Test public void testCompareCells() { KeyValue kv1 = new KeyValue(row1, fam1, qual1, val); KeyValue kv2 = new KeyValue(row2, fam1, qual1, val); - assertTrue((CellComparator.compareStatic(kv1, kv2, false)) < 0); + assertTrue((comparator.compare(kv1, kv2)) < 0); kv1 = new KeyValue(row1, fam2, qual1, val); kv2 = new KeyValue(row1, fam1, qual1, val); - assertTrue((CellComparator.compareFamilies(kv1, kv2) > 0)); + assertTrue((comparator.compareFamilies(kv1, kv2) > 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, val); kv2 = new KeyValue(row1, fam1, qual1, 2l, val); - assertTrue((CellComparator.compareStatic(kv1, kv2, false) > 0)); + assertTrue((comparator.compare(kv1, kv2) > 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum); - assertTrue((CellComparator.compareStatic(kv1, kv2, false) > 0)); + assertTrue((comparator.compare(kv1, kv2) > 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); kv2 = new KeyValue(row1, fam_1_2, qual1, 1l, Type.Maximum); - assertTrue((CellComparator.compareCommonFamilyPrefix(kv1, kv2, 4) < 0)); + assertTrue((comparator.compareCommonFamilyPrefix(kv1, kv2, 4) < 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); kv2 = new KeyValue(row_1_0, fam_1_2, qual1, 1l, Type.Maximum); - assertTrue((CellComparator.compareCommonRowPrefix(kv1, kv2, 4) < 0)); + assertTrue((comparator.compareCommonRowPrefix(kv1, kv2, 4) < 0)); kv1 = new KeyValue(row1, fam1, qual2, 1l, Type.Put); kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum); - assertTrue((CellComparator.compareCommonQualifierPrefix(kv1, kv2, 4) > 0)); + assertTrue((comparator.compareCommonQualifierPrefix(kv1, kv2, 4) > 0)); kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Put); - assertTrue((CellComparator.equals(kv1, kv2))); + assertTrue((CellUtil.equals(kv1, kv2))); } } diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 467c7fe..71757a9 100644 --- hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -268,7 +268,7 @@ public class TestKeyValue extends TestCase { private final byte[] qualA = Bytes.toBytes("qfA"); private final byte[] qualB = Bytes.toBytes("qfB"); - private void assertKVLess(KeyValue.KVComparator 
c, + private void assertKVLess(CellComparator c, KeyValue less, KeyValue greater) { int cmp = c.compare(less,greater); @@ -277,20 +277,20 @@ public class TestKeyValue extends TestCase { assertTrue(cmp > 0); } - private void assertKVLessWithoutRow(KeyValue.KVComparator c, int common, KeyValue less, + private void assertKVLessWithoutRow(CellComparator c, int common, KeyValue less, KeyValue greater) { - int cmp = c.compareIgnoringPrefix(common, less.getBuffer(), less.getOffset() + int cmp = KeyValueUtil.compareIgnoringPrefix(common, less.getBuffer(), less.getOffset() + KeyValue.ROW_OFFSET, less.getKeyLength(), greater.getBuffer(), greater.getOffset() + KeyValue.ROW_OFFSET, greater.getKeyLength()); assertTrue(cmp < 0); - cmp = c.compareIgnoringPrefix(common, greater.getBuffer(), greater.getOffset() + cmp = KeyValueUtil.compareIgnoringPrefix(common, greater.getBuffer(), greater.getOffset() + KeyValue.ROW_OFFSET, greater.getKeyLength(), less.getBuffer(), less.getOffset() + KeyValue.ROW_OFFSET, less.getKeyLength()); assertTrue(cmp > 0); } public void testCompareWithoutRow() { - final KeyValue.KVComparator c = KeyValue.COMPARATOR; + final CellComparator c = KeyValue.COMPARATOR; byte[] row = Bytes.toBytes("row"); byte[] fa = Bytes.toBytes("fa"); @@ -337,7 +337,7 @@ public class TestKeyValue extends TestCase { } public void testFirstLastOnRow() { - final KVComparator c = KeyValue.COMPARATOR; + final CellComparator c = KeyValue.COMPARATOR; long ts = 1; byte[] bufferA = new byte[128]; int offsetA = 0; @@ -411,7 +411,7 @@ public class TestKeyValue extends TestCase { byte[] tmpArr = new byte[initialPadding + endingPadding + keyLen]; System.arraycopy(kv.getBuffer(), kv.getKeyOffset(), tmpArr, initialPadding, keyLen); - KeyValue kvFromKey = KeyValue.createKeyValueFromKey(tmpArr, initialPadding, + KeyValue kvFromKey = KeyValueUtil.createKeyValueFromKey(tmpArr, initialPadding, keyLen); assertEquals(keyLen, kvFromKey.getKeyLength()); assertEquals(KeyValue.ROW_OFFSET + keyLen, kvFromKey.getBuffer().length); @@ -440,7 +440,7 @@ public class TestKeyValue extends TestCase { * See HBASE-7845 */ public void testGetShortMidpointKey() { - final KVComparator keyComparator = KeyValue.COMPARATOR; + final KVComparator keyComparator = (KVComparator)KeyValue.COMPARATOR; //verify that faked shorter rowkey could be generated long ts = 5; KeyValue kv1 = new KeyValue(Bytes.toBytes("the quick brown fox"), family, qualA, ts, Type.Put); @@ -474,14 +474,14 @@ public class TestKeyValue extends TestCase { newKey = keyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey()); assertTrue(keyComparator.compareFlatKey(kv1.getKey(), newKey) < 0); assertTrue(keyComparator.compareFlatKey(newKey, kv2.getKey()) < 0); - KeyValue newKeyValue = KeyValue.createKeyValueFromKey(newKey); + KeyValue newKeyValue = KeyValueUtil.createKeyValueFromKey(newKey); assertTrue(Arrays.equals(newKeyValue.getFamily(),family)); assertTrue(Arrays.equals(newKeyValue.getQualifier(),qualB)); assertTrue(newKeyValue.getTimestamp() == HConstants.LATEST_TIMESTAMP); assertTrue(newKeyValue.getTypeByte() == Type.Maximum.getCode()); //verify metaKeyComparator's getShortMidpointKey output - final KVComparator metaKeyComparator = KeyValue.META_COMPARATOR; + final KVComparator metaKeyComparator = (KVComparator)KeyValue.META_COMPARATOR; kv1 = new KeyValue(Bytes.toBytes("ilovehbase123"), family, qualA, 5, Type.Put); kv2 = new KeyValue(Bytes.toBytes("ilovehbase234"), family, qualA, 0, Type.Put); newKey = metaKeyComparator.getShortMidpointKey(kv1.getKey(), kv2.getKey()); diff 
--git hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java index bca57d9..ab3fd76 100644 --- hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java +++ hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.hbase.codec; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -26,11 +28,9 @@ import java.io.DataOutputStream; import java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.codec.CellCodec; -import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -111,13 +111,13 @@ public class TestCellCodec { Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); Cell c = decoder.current(); - assertTrue(CellComparator.equals(c, kv1)); + assertTrue(CellUtil.equals(c, kv1)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv2)); + assertTrue(CellUtil.equals(c, kv2)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv3)); + assertTrue(CellUtil.equals(c, kv3)); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index 1499a91..f6ada30 100644 --- hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.SmallTests; @@ -75,7 +75,7 @@ public class TestCellCodecWithTags { Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); Cell c = decoder.current(); - assertTrue(CellComparator.equals(c, cell1)); + assertTrue(CellUtil.equals(c, cell1)); List tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); @@ -86,7 +86,7 @@ public class TestCellCodecWithTags { assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue())); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, cell2)); + assertTrue(CellUtil.equals(c, cell2)); tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); @@ -94,7 +94,7 @@ public class TestCellCodecWithTags { assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue())); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, cell3)); + assertTrue(CellUtil.equals(c, cell3)); tags = Tag.asList(c.getTagsArray(), 
c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); diff --git hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index d8dd7fe..107acac 100644 --- hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.SmallTests; @@ -75,7 +75,7 @@ public class TestKeyValueCodecWithTags { Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); Cell c = decoder.current(); - assertTrue(CellComparator.equals(c, kv1)); + assertTrue(CellUtil.equals(c, kv1)); List tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(2, tags.size()); Tag tag = tags.get(0); @@ -86,7 +86,7 @@ public class TestKeyValueCodecWithTags { assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue())); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv2)); + assertTrue(CellUtil.equals(c, kv2)); tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(1, tags.size()); tag = tags.get(0); @@ -94,7 +94,7 @@ public class TestKeyValueCodecWithTags { assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue())); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv3)); + assertTrue(CellUtil.equals(c, kv3)); tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); assertEquals(3, tags.size()); tag = tags.get(0); diff --git hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java index 952a319..45b3611 100644 --- hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java +++ hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java @@ -20,14 +20,10 @@ package org.apache.hadoop.hbase.types; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.CellProtos; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.PositionedByteRange; -import org.apache.hadoop.hbase.util.SimplePositionedByteRange; +import org.apache.hadoop.hbase.util.*; import org.junit.Test; public class TestPBCell { @@ -48,6 +44,6 @@ public class TestPBCell { pbr.setPosition(0); decoded = CODEC.decode(pbr); assertEquals(encodedLength, pbr.getPosition()); - assertTrue(CellComparator.equals(cell, ProtobufUtil.toCell(decoded))); + assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(decoded))); } } diff --git hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index 2a0c459..f268dcf 100644 --- 
hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -24,8 +24,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.MetaComparator; import org.apache.hadoop.hbase.KeyValue.RawBytesComparator; import org.apache.hadoop.hbase.KeyValueUtil; @@ -57,7 +57,7 @@ import org.apache.hadoop.io.WritableUtils; * created and recycled by static PtEncoderFactory and PtDecoderFactory. */ @InterfaceAudience.Private -public class PrefixTreeCodec implements DataBlockEncoder{ +public class PrefixTreeCodec implements DataBlockEncoder { /** * no-arg constructor for reflection @@ -149,7 +149,7 @@ public class PrefixTreeCodec implements DataBlockEncoder{ * the way to this point. */ @Override - public EncodedSeeker createSeeker(KVComparator comparator, HFileBlockDecodingContext decodingCtx) { + public EncodedSeeker createSeeker(CellComparator comparator, HFileBlockDecodingContext decodingCtx) { if (comparator instanceof RawBytesComparator){ throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator"); } else if (comparator instanceof MetaComparator){ diff --git hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java index 1184af0..219b13d 100644 --- hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java +++ hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java @@ -22,9 +22,9 @@ import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory; import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher; @@ -239,10 +239,8 @@ public class PrefixTreeSeeker implements EncodedSeeker { } @Override - public int compareKey(KVComparator comparator, byte[] key, int offset, int length) { - // can't optimize this, make a copy of the key - ByteBuffer bb = getKeyDeepCopy(); - return comparator.compareFlatKey(key, offset, length, bb.array(), bb.arrayOffset(), bb.limit()); + public int compareKey(CellComparator comparator, byte[] key, int offset, int length) { + return compareKey(comparator, new KeyValue.KeyOnlyKeyValue(key, offset, length)); } @Override @@ -255,7 +253,8 @@ public class PrefixTreeSeeker implements EncodedSeeker { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(CellComparator comparator, Cell key) { + // can't optimize this, make a copy of the key ByteBuffer bb = getKeyDeepCopy(); return comparator.compare(key, new KeyValue.KeyOnlyKeyValue(bb.array(), bb.arrayOffset(), bb.limit())); diff --git hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java 
hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java index 5e8c2ec..85344aa 100644 --- hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java +++ hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java @@ -62,7 +62,6 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne protected int currentCellIndex; protected boolean movedToPrevious; - /*********************** construct ******************************/ // pass in blockMeta so we can initialize buffers big enough for all cells in the block @@ -430,7 +429,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne protected int populateNonRowFieldsAndCompareTo(int cellNum, Cell key) { populateNonRowFields(cellNum); - return CellComparator.compareStatic(this, key, true); + return CellComparator.CELL_COMPARATOR.compareOnlyKeyPortion(this, key); } protected void populateFirstNonRowFields() { diff --git hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java index c0f4060..3317ca3 100644 --- hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java +++ hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java @@ -44,7 +44,7 @@ public class PrefixTreeCell implements Cell, Comparable { //Same as KeyValue constructor. Only used to avoid NPE's when full cell hasn't been initialized. public static final KeyValue.Type DEFAULT_TYPE = KeyValue.Type.Put; - + /******************** fields ************************/ protected byte[] block; @@ -90,7 +90,7 @@ public class PrefixTreeCell implements Cell, Comparable { return false; } //Temporary hack to maintain backwards compatibility with KeyValue.equals - return CellComparator.equalsIgnoreMvccVersion(this, (Cell)obj); + return CellUtil.equalsIgnoreMvccVersion(this, (Cell)obj); //TODO return CellComparator.equals(this, (Cell)obj);//see HBASE-6907 } @@ -106,7 +106,7 @@ public class PrefixTreeCell implements Cell, Comparable { @Override public int compareTo(Cell other) { - return CellComparator.compareStatic(this, other, false); + return CellComparator.CELL_COMPARATOR.compare(this, other); } @Override diff --git hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java index a895f9f..4d6011e 100644 --- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java +++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.row; import java.util.List; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; @@ -37,7 +37,7 @@ public abstract class BaseTestRowData implements TestRowData { for (int i = 1; i < inputs.size(); ++i) { KeyValue lastKv = inputs.get(i - 1); KeyValue kv = inputs.get(i); - if (!CellComparator.equalsRow(lastKv, kv)) { + if (!CellUtil.equalsRow(lastKv, kv)) { 
rowStartIndexes.add(i); } } diff --git hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java index d7652cd..8b644ef 100644 --- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java +++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java @@ -25,7 +25,7 @@ import java.util.Collection; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.SmallTests; @@ -83,7 +83,7 @@ public class TestPrefixTreeSearcher { // check all 3 permutations of equals() Assert.assertEquals(inputCell, outputCell); Assert.assertEquals(outputCell, inputCell); - Assert.assertTrue(CellComparator.equals(inputCell, outputCell)); + Assert.assertTrue(CellUtil.equals(inputCell, outputCell)); } Assert.assertEquals(rows.getInputs().size(), i + 1); } finally { @@ -122,7 +122,7 @@ public class TestPrefixTreeSearcher { boolean hit = searcher.positionAt(kv); Assert.assertTrue(hit); Cell foundKv = searcher.current(); - Assert.assertTrue(CellComparator.equals(kv, foundKv)); + Assert.assertTrue(CellUtil.equals(kv, foundKv)); } } finally { DecoderFactory.checkIn(searcher); diff --git hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java index 5f319fc..5f44178 100644 --- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java +++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; @@ -73,11 +73,11 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{ throw new RuntimeException(e); } Cell first = searcher.current(); - Assert.assertTrue(CellComparator.equals(d.get(0), first)); + Assert.assertTrue(CellUtil.equals(d.get(0), first)); //test first cell in second row Assert.assertTrue(searcher.positionAt(d.get(1))); - Assert.assertTrue(CellComparator.equals(d.get(1), searcher.current())); + Assert.assertTrue(CellUtil.equals(d.get(1), searcher.current())); testBetween1and2(searcher); testBetween2and3(searcher); @@ -99,12 +99,12 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{ //test atOrBefore p = searcher.positionAtOrBefore(betweenAAndAAA); Assert.assertEquals(CellScannerPosition.BEFORE, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(1))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(1))); //test atOrAfter p = searcher.positionAtOrAfter(betweenAAndAAA); Assert.assertEquals(CellScannerPosition.AFTER, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(2))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(2))); } 
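The assertions above use the equality helpers that this patch moves from CellComparator onto CellUtil. As a minimal illustration of what CellUtil.equals checks (the class name and cell values below are invented for the example, not part of the patch):

import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class CellUtilEqualsSketch {
  public static void main(String[] args) {
    // Same row, family, qualifier, timestamp and type; different values.
    KeyValue a = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("qual"), 1L, Bytes.toBytes("v1"));
    KeyValue b = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("qual"), 1L, Bytes.toBytes("v2"));
    // equals() compares the key parts only (row, family, qualifier,
    // timestamp, type), not the value, so these cells are "equal".
    System.out.println(CellUtil.equals(a, b));    // true
    System.out.println(CellUtil.equalsRow(a, b)); // true
  }
}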
private void testBetween2and3(CellSearcher searcher){ @@ -117,12 +117,12 @@ public class TestRowDataSearcherRowMiss extends BaseTestRowData{ //test atOrBefore p = searcher.positionAtOrBefore(betweenAAAndB); Assert.assertEquals(CellScannerPosition.BEFORE, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(2))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(2))); //test atOrAfter p = searcher.positionAtOrAfter(betweenAAAndB); Assert.assertEquals(CellScannerPosition.AFTER, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(3))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(3))); } } diff --git hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java index 6c3750a..62234fc 100644 --- hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java +++ hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; @@ -73,11 +73,11 @@ public class TestRowDataSimple extends BaseTestRowData { throw new RuntimeException(e); } Cell first = searcher.current(); - Assert.assertTrue(CellComparator.equals(d.get(0), first)); + Assert.assertTrue(CellUtil.equals(d.get(0), first)); // test first cell in second row Assert.assertTrue(searcher.positionAt(d.get(3))); - Assert.assertTrue(CellComparator.equals(d.get(3), searcher.current())); + Assert.assertTrue(CellUtil.equals(d.get(3), searcher.current())); Cell between4And5 = new KeyValue(rowB, cf, cq1, ts - 2, v0); @@ -87,12 +87,12 @@ public class TestRowDataSimple extends BaseTestRowData { // test atOrBefore p = searcher.positionAtOrBefore(between4And5); Assert.assertEquals(CellScannerPosition.BEFORE, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(4))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(4))); // test atOrAfter p = searcher.positionAtOrAfter(between4And5); Assert.assertEquals(CellScannerPosition.AFTER, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(5))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(5))); // test when key falls before first key in block Cell beforeFirst = new KeyValue(Bytes.toBytes("A"), cf, cq0, ts, v0); @@ -101,7 +101,7 @@ public class TestRowDataSimple extends BaseTestRowData { Assert.assertEquals(CellScannerPosition.BEFORE_FIRST, p); p = searcher.positionAtOrAfter(beforeFirst); Assert.assertEquals(CellScannerPosition.AFTER, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), d.get(0))); + Assert.assertTrue(CellUtil.equals(searcher.current(), d.get(0))); Assert.assertEquals(d.get(0), searcher.current()); // test when key falls after last key in block @@ -111,7 +111,7 @@ public class TestRowDataSimple extends BaseTestRowData { Assert.assertEquals(CellScannerPosition.AFTER_LAST, p); p = searcher.positionAtOrBefore(afterLast); Assert.assertEquals(CellScannerPosition.BEFORE, p); - Assert.assertTrue(CellComparator.equals(searcher.current(), 
CollectionUtils.getLast(d))); + Assert.assertTrue(CellUtil.equals(searcher.current(), CollectionUtils.getLast(d))); } } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index edfeb12..461c4c6 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -26,8 +26,10 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -75,7 +77,7 @@ public abstract class AbstractHFileReader protected int avgValueLen = -1; /** Key comparator */ - protected KVComparator comparator = new KVComparator(); + protected CellComparator comparator = new KVComparator(); /** Size of this file. */ protected final long fileSize; @@ -190,7 +192,7 @@ public abstract class AbstractHFileReader byte[] firstKey = getFirstKey(); if (firstKey == null) return null; - return KeyValue.createKeyValueFromKey(firstKey).getRow(); + return KeyValueUtil.createKeyValueFromKey(firstKey).getRow(); } /** @@ -204,7 +206,7 @@ public abstract class AbstractHFileReader byte[] lastKey = getLastKey(); if (lastKey == null) return null; - return KeyValue.createKeyValueFromKey(lastKey).getRow(); + return KeyValueUtil.createKeyValueFromKey(lastKey).getRow(); } /** @return number of KV entries in this HFile */ @@ -215,7 +217,7 @@ public abstract class AbstractHFileReader /** @return comparator */ @Override - public KVComparator getComparator() { + public CellComparator getComparator() { return comparator; } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java index 25e53cd..92be4a3 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java @@ -121,7 +121,7 @@ public abstract class AbstractHFileWriter implements HFile.Writer { this.blockEncoder = NoOpDataBlockEncoder.INSTANCE; } this.comparator = comparator != null ? 
comparator - : KeyValue.COMPARATOR; + : (KeyValue.KVComparator)KeyValue.COMPARATOR; closeOutputStream = path != null; this.cacheConf = cacheConf; diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java index 1b065ec..5e2f289 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java @@ -26,13 +26,13 @@ import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.protobuf.generated.HFileProtos; +import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; /** @@ -107,7 +107,8 @@ public class FixedFileTrailer { private long lastDataBlockOffset; /** Raw key comparator class name in version 3 */ - private String comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName(); + private String comparatorClassName = ((KeyValue.KVComparator) KeyValue.COMPARATOR) + .getLegacyKeyComparatorName(); /** The encryption key */ private byte[] encryptionKey; @@ -543,9 +544,11 @@ public class FixedFileTrailer { // HFile V2 legacy comparator class names. if (KeyValue.COMPARATOR.getClass().equals(klass)) { - comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName(); + comparatorClassName = ((KeyValue.KVComparator) KeyValue.COMPARATOR) + .getLegacyKeyComparatorName(); } else if (KeyValue.META_COMPARATOR.getClass().equals(klass)) { - comparatorClassName = KeyValue.META_COMPARATOR.getLegacyKeyComparatorName(); + comparatorClassName = ((KeyValue.KVComparator) KeyValue.META_COMPARATOR) + .getLegacyKeyComparatorName(); } else if (KeyValue.RAW_COMPARATOR.getClass().equals(klass)) { comparatorClassName = KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName(); } else { @@ -565,9 +568,11 @@ public class FixedFileTrailer { String comparatorClassName) throws IOException { try { // HFile V2 legacy comparator class names. 
- if (comparatorClassName.equals(KeyValue.COMPARATOR.getLegacyKeyComparatorName())) { + if (comparatorClassName.equals(((KeyValue.KVComparator) KeyValue.COMPARATOR) + .getLegacyKeyComparatorName())) { comparatorClassName = KeyValue.COMPARATOR.getClass().getName(); - } else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())) { + } else if (comparatorClassName.equals(((KeyValue.KVComparator) KeyValue.META_COMPARATOR) + .getLegacyKeyComparatorName())) { comparatorClassName = KeyValue.META_COMPARATOR.getClass().getName(); } else if (comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) { comparatorClassName = KeyValue.RAW_COMPARATOR.getClass().getName(); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index c0dd672..51d062f 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -35,12 +35,8 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -51,6 +47,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; @@ -63,13 +60,13 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; import org.apache.hadoop.hbase.protobuf.generated.HFileProtos; import org.apache.hadoop.hbase.util.BloomFilterWriter; +import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.Writable; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; /** * File format for hbase. 
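Note the pattern that recurs in FixedFileTrailer above and in AbstractHFileWriter and the HFile writer factory nearby: because KeyValue.COMPARATOR is now exposed as a CellComparator, callers that still need KVComparator-only methods such as getLegacyKeyComparatorName() or getShortMidpointKey() must down-cast. A sketch of that caller-side pattern (the wrapper method below is illustrative only):

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;

public class KVComparatorCastSketch {
  // Produces a short key sorting between two flat keys, as used for HFile
  // index midkeys. The cast is required now that KeyValue.COMPARATOR is
  // statically typed as CellComparator.
  static byte[] midpoint(byte[] leftFlatKey, byte[] rightFlatKey) {
    KVComparator c = (KVComparator) KeyValue.COMPARATOR;
    return c.getShortMidpointKey(leftFlatKey, rightFlatKey);
  }
}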
@@ -253,7 +250,7 @@ public class HFile { protected FileSystem fs; protected Path path; protected FSDataOutputStream ostream; - protected KVComparator comparator = KeyValue.COMPARATOR; + protected KVComparator comparator = (KVComparator)KeyValue.COMPARATOR; protected InetSocketAddress[] favoredNodes; private HFileContext fileContext; @@ -384,7 +381,7 @@ public class HFile { */ String getName(); - KVComparator getComparator(); + CellComparator getComparator(); HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction); diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index f7b5b9d..5d6b555 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -37,9 +37,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -108,7 +108,7 @@ public class HFileBlockIndex { */ public static class BlockIndexReader implements HeapSize { /** Needed doing lookup on blocks. */ - private final KVComparator comparator; + private final CellComparator comparator; // Root-level data. private byte[][] blockKeys; @@ -133,13 +133,13 @@ public class HFileBlockIndex { /** A way to read {@link HFile} blocks at a given offset */ private CachingBlockReader cachingBlockReader; - public BlockIndexReader(final KVComparator c, final int treeLevel, + public BlockIndexReader(final CellComparator c, final int treeLevel, final CachingBlockReader cachingBlockReader) { this(c, treeLevel); this.cachingBlockReader = cachingBlockReader; } - public BlockIndexReader(final KVComparator c, final int treeLevel) + public BlockIndexReader(final CellComparator c, final int treeLevel) { comparator = c; searchTreeLevel = treeLevel; @@ -511,7 +511,7 @@ public class HFileBlockIndex { * @throws IOException */ static int binarySearchNonRootIndex(Cell key, ByteBuffer nonRootIndex, - KVComparator comparator) { + CellComparator comparator) { int numEntries = nonRootIndex.getInt(0); int low = 0; @@ -598,7 +598,7 @@ public class HFileBlockIndex { * */ static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, Cell key, - KVComparator comparator) { + CellComparator comparator) { int entryIndex = binarySearchNonRootIndex(key, nonRootBlock, comparator); if (entryIndex != -1) { diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java index 1292319..0ed0897 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java @@ -29,9 +29,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; 
import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; @@ -731,10 +731,10 @@ public class HFileReaderV2 extends AbstractHFileReader { * @param length * @return -1 is the passed key is smaller than the current key, 0 if equal and 1 if greater */ - public abstract int compareKey(KVComparator comparator, byte[] key, int offset, + public abstract int compareKey(CellComparator comparator, byte[] key, int offset, int length); - public abstract int compareKey(KVComparator comparator, Cell kv); + public abstract int compareKey(CellComparator comparator, Cell kv); } /** @@ -776,7 +776,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, byte[] key, int offset, int length) { + public int compareKey(CellComparator comparator, byte[] key, int offset, int length) { return comparator.compareFlatKey(key, offset, length, blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position() + KEY_VALUE_LEN_SIZE, currKeyLen); } @@ -1081,7 +1081,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(CellComparator comparator, Cell key) { return comparator.compareOnlyKeyPortion( key, new KeyValue.KeyOnlyKeyValue(blockBuffer.array(), blockBuffer.arrayOffset() @@ -1201,7 +1201,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, byte[] key, int offset, int length) { + public int compareKey(CellComparator comparator, byte[] key, int offset, int length) { return seeker.compareKey(comparator, key, offset, length); } @@ -1257,7 +1257,7 @@ public class HFileReaderV2 extends AbstractHFileReader { } @Override - public int compareKey(KVComparator comparator, Cell key) { + public int compareKey(CellComparator comparator, Cell key) { return seeker.compareKey(comparator, key); } } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java index c6e5adc..a0a60eb 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java @@ -24,6 +24,7 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.Serializer; @@ -55,7 +56,7 @@ public class KeyValueSerialization implements Serialization { @Override public KeyValue deserialize(KeyValue ignore) throws IOException { // I can't overwrite the passed in KV, not from a proto kv, not just yet. 
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
index c6e5adc..a0a60eb 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java
@@ -24,6 +24,7 @@ import java.io.InputStream;
 import java.io.OutputStream;

 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.Serialization;
 import org.apache.hadoop.io.serializer.Serializer;
@@ -55,7 +56,7 @@ public class KeyValueSerialization implements Serialization<KeyValue> {
     @Override
     public KeyValue deserialize(KeyValue ignore) throws IOException {
       // I can't overwrite the passed in KV, not from a proto kv, not just yet. TODO
-      return KeyValue.create(this.dis);
+      return KeyValueUtil.create(this.dis);
     }

     @Override
@@ -79,7 +80,7 @@ public class KeyValueSerialization implements Serialization<KeyValue> {
     @Override
     public void serialize(KeyValue kv) throws IOException {
-      KeyValue.write(kv, this.dos);
+      KeyValueUtil.write(kv, this.dos);
     }
   }
 }
\ No newline at end of file
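The serializer's wire contract is unchanged by the move to KeyValueUtil: length-prefixed KeyValue bytes on the stream. A self-contained round-trip sketch against the new entry points (row/family/qualifier names here are arbitrary):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class KVRoundTrip {
  public static void main(String[] args) throws IOException {
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    KeyValueUtil.write(kv, dos);              // length-prefixed, same format KeyValue.write used
    dos.close();
    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    KeyValue copy = KeyValueUtil.create(dis); // reads the length, then the body
    assert Bytes.equals(kv.getBuffer(), copy.getBuffer());
  }
}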
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
index 759f842..3f296f8 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java
@@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -83,7 +84,7 @@ public class DefaultMemStore implements MemStore {
   // Snapshot of memstore.  Made for flusher.
   volatile KeyValueSkipListSet snapshot;

-  final KeyValue.KVComparator comparator;
+  final CellComparator comparator;

   // Used to track own heapSize
   final AtomicLong size;
@@ -111,7 +112,7 @@ public class DefaultMemStore implements MemStore {
    * @param c Comparator
    */
   public DefaultMemStore(final Configuration conf,
-                  final KeyValue.KVComparator c) {
+                  final CellComparator c) {
     this.conf = conf;
     this.comparator = c;
     this.kvset = new KeyValueSkipListSet(c);

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
index f2a0b06..705c664 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
@@ -24,11 +24,11 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
+import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy;
 import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
-import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.util.ReflectionUtils;

 /**
@@ -61,7 +61,7 @@ public class DefaultStoreEngine extends StoreEngine<DefaultStoreFlusher, RatioBasedCompactionPolicy, DefaultCompactor, DefaultStoreFileManager>

   @Override
   protected void createComponents(
-      Configuration conf, Store store, KVComparator kvComparator) throws IOException {
+      Configuration conf, Store store, CellComparator kvComparator) throws IOException {
     storeFileManager = new DefaultStoreFileManager(kvComparator, conf);
     String className = conf.get(DEFAULT_COMPACTOR_CLASS_KEY, DEFAULT_COMPACTOR_CLASS.getName());
     try {

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
index 4261860..0dc5336 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java
@@ -29,8 +29,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;

 import com.google.common.collect.ImmutableCollection;
 import com.google.common.collect.ImmutableList;
@@ -43,7 +43,7 @@ import com.google.common.collect.Lists;
 class DefaultStoreFileManager implements StoreFileManager {
   static final Log LOG = LogFactory.getLog(DefaultStoreFileManager.class);

-  private final KVComparator kvComparator;
+  private final CellComparator kvComparator;
   private final Configuration conf;

   /**
@@ -52,7 +52,7 @@ class DefaultStoreFileManager implements StoreFileManager {
    */
   private volatile ImmutableList<StoreFile> storefiles = null;

-  public DefaultStoreFileManager(KVComparator kvComparator, Configuration conf) {
+  public DefaultStoreFileManager(CellComparator kvComparator, Configuration conf) {
    this.kvComparator = kvComparator;
    this.conf = conf;
   }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
index ec676fa..dd3715e 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.util.Bytes;

 /**
@@ -44,7 +45,7 @@ class GetClosestRowBeforeTracker {
   // Any cell w/ a ts older than this is expired.
   private final long oldestts;
   private Cell candidate = null;
-  private final KVComparator kvcomparator;
+  private final CellComparator kvcomparator;
   // Flag for whether we're doing getclosest on a metaregion.
   private final boolean metaregion;
   // Offset and length into targetkey demarking table name (if in a metaregion).
@@ -61,7 +62,7 @@ class GetClosestRowBeforeTracker {
    * @param ttl Time to live in ms for this Store
    * @param metaregion True if this is hbase:meta or -ROOT- region.
    */
-  GetClosestRowBeforeTracker(final KVComparator c, final KeyValue kv,
+  GetClosestRowBeforeTracker(final CellComparator c, final KeyValue kv,
       final long ttl, final boolean metaregion) {
     super();
     this.metaregion = metaregion;

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index b879e8a..68f84d4 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompoundConfiguration;
@@ -288,7 +289,7 @@ public class HRegion implements HeapSize { // , Writable{
   private final HRegionFileSystem fs;
   protected final Configuration conf;
   private final Configuration baseConf;
-  private final KeyValue.KVComparator comparator;
+  private final CellComparator comparator;
   private final int rowLockWaitDuration;
   static final int DEFAULT_ROWLOCK_WAIT_DURATION = 30000;

@@ -1448,7 +1449,7 @@ public class HRegion implements HeapSize { // , Writable{
   /**
    * @return KeyValue Comparator
    */
-  public KeyValue.KVComparator getComparator() {
+  public CellComparator getComparator() {
     return this.comparator;
   }
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 8d20d7b..6019dbb 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompoundConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -173,7 +174,7 @@ public class HStore implements Store {
   private int bytesPerChecksum;

   // Comparing KeyValues
-  private final KeyValue.KVComparator comparator;
+  private final CellComparator comparator;

   final StoreEngine<?, ?, ?, ?> storeEngine;

@@ -240,7 +241,7 @@ public class HStore implements Store {
     scanInfo = new ScanInfo(family, ttl, timeToPurgeDeletes, this.comparator);
     String className = conf.get(MEMSTORE_CLASS_NAME, DefaultMemStore.class.getName());
     this.memstore = ReflectionUtils.instantiateWithCustomCtor(className, new Class[] {
-        Configuration.class, KeyValue.KVComparator.class }, new Object[] { conf, this.comparator });
+        Configuration.class, CellComparator.class }, new Object[] { conf, this.comparator });
     this.offPeakHours = OffPeakHours.getInstance(conf);

     // Setting up cache configuration for this family
@@ -692,7 +693,7 @@ public class HStore implements Store {
     Preconditions.checkState(firstKey != null, "First key can not be null");
     byte[] lk = reader.getLastKey();
     Preconditions.checkState(lk != null, "Last key can not be null");
-    byte[] lastKey = KeyValue.createKeyValueFromKey(lk).getRow();
+    byte[] lastKey = KeyValueUtil.createKeyValueFromKey(lk).getRow();
     LOG.debug("HFile bounds: first=" + Bytes.toStringBinary(firstKey) +
         " last=" + Bytes.toStringBinary(lastKey));
@@ -1736,9 +1737,9 @@ public class HStore implements Store {
     // TODO: Cache these keys rather than make each time?
     byte [] fk = r.getFirstKey();
     if (fk == null) return false;
-    KeyValue firstKV = KeyValue.createKeyValueFromKey(fk, 0, fk.length);
+    KeyValue firstKV = KeyValueUtil.createKeyValueFromKey(fk, 0, fk.length);
     byte [] lk = r.getLastKey();
-    KeyValue lastKV = KeyValue.createKeyValueFromKey(lk, 0, lk.length);
+    KeyValue lastKV = KeyValueUtil.createKeyValueFromKey(lk, 0, lk.length);
     KeyValue firstOnRow = state.getTargetKey();
     if (this.comparator.compareRows(lastKV, firstOnRow) < 0) {
       // If last key in file is not of the target table, no candidates in this
@@ -2151,7 +2152,7 @@ public class HStore implements Store {
   }

   @Override
-  public KeyValue.KVComparator getComparator() {
+  public CellComparator getComparator() {
     return comparator;
   }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
index 69bfcdf..16ca095 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
@@ -26,7 +26,7 @@ import java.util.PriorityQueue;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.CellComparator;

 /**
  * Implements a heap merge across any number of KeyValueScanners.
@@ -66,7 +66,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
    * @param comparator
    */
   public KeyValueHeap(List<? extends KeyValueScanner> scanners,
-      KVComparator comparator) throws IOException {
+      CellComparator comparator) throws IOException {
     this(scanners, new KVScannerComparator(comparator));
   }

@@ -169,12 +169,12 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
   }

   protected static class KVScannerComparator implements Comparator<KeyValueScanner> {
-    protected KVComparator kvComparator;
+    protected CellComparator kvComparator;
     /**
      * Constructor
      * @param kvComparator
      */
-    public KVScannerComparator(KVComparator kvComparator) {
+    public KVScannerComparator(CellComparator kvComparator) {
       this.kvComparator = kvComparator;
     }

     public int compare(KeyValueScanner left, KeyValueScanner right) {
@@ -207,7 +207,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
   /**
-   * @return KVComparator
+   * @return CellComparator
    */
-  public KVComparator getComparator() {
+  public CellComparator getComparator() {
     return this.kvComparator;
   }
 }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
index 1297ce2..807bf22 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
@@ -18,9 +18,6 @@
  */
 package org.apache.hadoop.hbase.regionserver;

-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue;
-
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Iterator;
@@ -29,6 +26,10 @@ import java.util.SortedSet;
 import java.util.concurrent.ConcurrentNavigableMap;
 import java.util.concurrent.ConcurrentSkipListMap;

+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+
 /**
  * A {@link java.util.Set} of {@link KeyValue}s implemented on top of a
  * {@link java.util.concurrent.ConcurrentSkipListMap}.  Works like a
@@ -47,7 +48,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
 public class KeyValueSkipListSet implements NavigableSet<KeyValue> {
   private final ConcurrentNavigableMap<KeyValue, KeyValue> delegatee;

-  KeyValueSkipListSet(final KeyValue.KVComparator c) {
+  KeyValueSkipListSet(final CellComparator c) {
     this.delegatee = new ConcurrentSkipListMap<KeyValue, KeyValue>(c);
   }
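KeyValueSkipListSet's trick, visible in the constructor above, is to get a concurrent sorted set with a pluggable comparator by delegating to a ConcurrentSkipListMap that maps each cell to itself (java.util's skip-list set does not expose a get()). A stripped-down sketch of that delegation, with E standing in for KeyValue and only the operations the memstore leans on:

import java.util.Comparator;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;

final class SkipListSetSketch<E> {
  private final ConcurrentNavigableMap<E, E> delegatee;

  SkipListSetSketch(Comparator<? super E> c) {
    // The comparator (now a CellComparator in HBase) fully defines the order.
    this.delegatee = new ConcurrentSkipListMap<E, E>(c);
  }

  /** add() maps the element to itself, so iteration follows comparator order. */
  boolean add(E e) { return this.delegatee.put(e, e) == null; }

  /** The get-style lookup a plain java.util.Set cannot offer. */
  E get(E e) { return this.delegatee.get(e); }

  E first() { return this.delegatee.firstKey(); }
}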
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java
index ba4f309..b749869 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java
@@ -24,8 +24,8 @@ import java.util.List;
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;

 /**
  * ReversedKeyValueHeap is used for supporting reversed scanning. Compared with
@@ -43,7 +43,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap {
   * @throws IOException
   */
  public ReversedKeyValueHeap(List<? extends KeyValueScanner> scanners,
-     KVComparator comparator) throws IOException {
+     CellComparator comparator) throws IOException {
    super(scanners, new ReversedKVScannerComparator(comparator));
  }

@@ -162,7 +162,7 @@ public class ReversedKeyValueHeap extends KeyValueHeap {
      * Constructor
      * @param kvComparator
      */
-    public ReversedKVScannerComparator(KVComparator kvComparator) {
+    public ReversedKVScannerComparator(CellComparator kvComparator) {
       super(kvComparator);
     }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java
index 488964d..7839c1a 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java
@@ -24,10 +24,9 @@ import java.util.NavigableSet;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.client.Scan;

 /**
@@ -63,7 +62,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner {

   @Override
   protected void resetKVHeap(List<? extends KeyValueScanner> scanners,
-      KVComparator comparator) throws IOException {
+      CellComparator comparator) throws IOException {
     // Combine all seeked scanners with a heap
     heap = new ReversedKeyValueHeap(scanners, comparator);
   }
@@ -100,7 +99,7 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner {

   @Override
   protected void checkScanOrder(Cell prevKV, Cell kv,
-      KeyValue.KVComparator comparator) throws IOException {
+      CellComparator comparator) throws IOException {
     // Check that the heap gives us KVs in an increasing order for same row and
     // decreasing order for different rows.
     assert prevKV == null || comparator == null || comparator.compareRows(kv, prevKV) < 0

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java
index 8adc21b..ce91181 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;

@@ -35,7 +35,7 @@ public class ScanInfo {
   private long ttl;
   private boolean keepDeletedCells;
   private long timeToPurgeDeletes;
-  private KVComparator comparator;
+  private CellComparator comparator;

   public static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
       + (2 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_INT)
@@ -49,7 +49,7 @@ public class ScanInfo {
    * @param comparator The store's comparator
    */
   public ScanInfo(final HColumnDescriptor family, final long ttl, final long timeToPurgeDeletes,
-      final KVComparator comparator) {
+      final CellComparator comparator) {
     this(family.getName(), family.getMinVersions(), family.getMaxVersions(), ttl, family
         .getKeepDeletedCells(), timeToPurgeDeletes, comparator);
   }
@@ -66,7 +66,7 @@ public class ScanInfo {
    */
   public ScanInfo(final byte[] family, final int minVersions, final int maxVersions,
       final long ttl, final boolean keepDeletedCells, final long timeToPurgeDeletes,
-      final KVComparator comparator) {
+      final CellComparator comparator) {
     this.family = family;
     this.minVersions = minVersions;
     this.maxVersions = maxVersions;
@@ -100,7 +100,7 @@ public class ScanInfo {
     return timeToPurgeDeletes;
   }

-  public KVComparator getComparator() {
+  public CellComparator getComparator() {
     return comparator;
   }
 }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index f2e8bfb..bc0aa9d 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -24,9 +24,9 @@ import java.util.NavigableSet;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -84,7 +84,7 @@ public class ScanQueryMatcher {
   private final Cell startKey;

   /** Row comparator for the region this query is for */
-  private final KeyValue.KVComparator rowComparator;
+  private final CellComparator rowComparator;

   /* row is not private for tests */
   /** Row the query is on */

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
index 4f0c5fb..7ec088c 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
@@ -56,7 +57,7 @@ public interface Store extends HeapSize, StoreConfigInformation {
   int NO_PRIORITY = Integer.MIN_VALUE;

   // General Accessors
-  KeyValue.KVComparator getComparator();
+  CellComparator getComparator();

   Collection<StoreFile> getStorefiles();

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java
index 4a7b1c8..4896d68 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreEngine.java
@@ -24,7 +24,7 @@ import java.util.List;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionPolicy;
 import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
@@ -97,10 +97,10 @@ public abstract class StoreEngine
   public static StoreEngine<?, ?, ?, ?> create(
-      Store store, Configuration conf, KVComparator kvComparator) throws IOException {
+      Store store, Configuration conf, CellComparator kvComparator) throws IOException {
     String className = conf.get(STORE_ENGINE_CLASS_KEY, DEFAULT_STORE_ENGINE_CLASS.getName());
     try {
       StoreEngine<?, ?, ?, ?> se = ReflectionUtils.instantiateWithCustomCtor(
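StoreEngine.create keeps the reflective construction pattern; only the comparator parameter it forwards changes type. Roughly what a helper like instantiateWithCustomCtor has to do under the hood (a sketch with simplified error handling, not HBase's actual implementation):

import java.lang.reflect.Constructor;

final class ReflectionSketch {
  // Instantiate className via the constructor matching exactly the given
  // parameter types -- the contract the store-engine factory relies on.
  static <T> T instantiate(String className, Class<?>[] ctorTypes, Object[] ctorArgs)
      throws ReflectiveOperationException {
    Class<?> clazz = Class.forName(className);
    Constructor<?> ctor = clazz.getDeclaredConstructor(ctorTypes);
    ctor.setAccessible(true); // engines need not expose a public constructor
    @SuppressWarnings("unchecked")
    T instance = (T) ctor.newInstance(ctorArgs);
    return instance;
  }
}

Keeping construction reflective is what lets hbase.hstore.engine.class (and the memstore/compactor equivalents seen above) swap implementations through configuration alone.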
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index 27c64f0..a6c1ef0 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -515,7 +516,7 @@ public class StoreFile {
     private final CacheConfig cacheConf;
     private final FileSystem fs;

-    private KeyValue.KVComparator comparator = KeyValue.COMPARATOR;
+    private CellComparator comparator = KeyValue.COMPARATOR;
     private BloomType bloomType = BloomType.NONE;
     private long maxKeyCount = 0;
     private Path dir;
@@ -562,7 +563,7 @@ public class StoreFile {
       return this;
     }

-    public WriterBuilder withComparator(KeyValue.KVComparator comparator) {
+    public WriterBuilder withComparator(CellComparator comparator) {
       Preconditions.checkNotNull(comparator);
       this.comparator = comparator;
       return this;
@@ -617,7 +618,7 @@ public class StoreFile {
         comparator = KeyValue.COMPARATOR;
       }
       return new Writer(fs, filePath,
-          conf, cacheConf, comparator, bloomType, maxKeyCount, favoredNodes, fileContext);
+          conf, cacheConf, (KVComparator)comparator, bloomType, maxKeyCount, favoredNodes, fileContext);
     }
   }

@@ -647,7 +648,7 @@ public class StoreFile {
    * @return The split point row, or null if splitting is not possible, or reader is null.
    */
   @SuppressWarnings("deprecation")
-  byte[] getFileSplitPoint(KVComparator comparator) throws IOException {
+  byte[] getFileSplitPoint(CellComparator comparator) throws IOException {
     if (this.reader == null) {
       LOG.warn("Storefile " + this + " Reader is null; cannot get split point");
       return null;
@@ -657,11 +658,11 @@ public class StoreFile {
     // the row we want to split on as midkey.
     byte [] midkey = this.reader.midkey();
     if (midkey != null) {
-      KeyValue mk = KeyValue.createKeyValueFromKey(midkey, 0, midkey.length);
+      KeyValue mk = KeyValueUtil.createKeyValueFromKey(midkey, 0, midkey.length);
       byte [] fk = this.reader.getFirstKey();
-      KeyValue firstKey = KeyValue.createKeyValueFromKey(fk, 0, fk.length);
+      KeyValue firstKey = KeyValueUtil.createKeyValueFromKey(fk, 0, fk.length);
       byte [] lk = this.reader.getLastKey();
-      KeyValue lastKey = KeyValue.createKeyValueFromKey(lk, 0, lk.length);
+      KeyValue lastKey = KeyValueUtil.createKeyValueFromKey(lk, 0, lk.length);
       // if the midkey is the same as the first or last keys, we cannot (ever) split this region.
       if (comparator.compareRows(mk, firstKey) == 0 || comparator.compareRows(mk, lastKey) == 0) {
         if (LOG.isDebugEnabled()) {
@@ -1031,7 +1032,7 @@ public class StoreFile {
       this.reader = null;
     }

-    public KVComparator getComparator() {
+    public CellComparator getComparator() {
       return reader.getComparator();
     }
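getFileSplitPoint's logic is untouched by the comparator swap: take the reader's midkey and refuse to split when its row equals the first or last row, since every cell would then land on one side of the split. The row-level decision, restated as a tiny sketch with byte[] rows and the reader plumbing stripped away:

import java.util.Arrays;

final class SplitPointSketch {
  /** Returns the split row, or null when the file cannot be split. */
  static byte[] splitRow(byte[] firstRow, byte[] midRow, byte[] lastRow) {
    if (Arrays.equals(midRow, firstRow) || Arrays.equals(midRow, lastRow)) {
      // One daughter region would be empty; splitting is pointless.
      return null;
    }
    return midRow;
  }
}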
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
index aa351d3..aae66cf 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -369,7 +370,7 @@ public class StoreFileScanner implements KeyValueScanner {
     return reader;
   }

-  KeyValue.KVComparator getComparator() {
+  CellComparator getComparator() {
     return reader.getComparator();
   }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 7160b30..ae6a8d7 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -31,11 +31,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.client.Scan;
@@ -342,7 +342,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
   }

   protected void resetKVHeap(List<? extends KeyValueScanner> scanners,
-      KVComparator comparator) throws IOException {
+      CellComparator comparator) throws IOException {
     // Combine all seeked scanners with a heap
     heap = new KeyValueHeap(scanners, comparator);
   }
@@ -476,7 +476,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
     Cell cell;

     // Only do a sanity-check if store and comparator are available.
-    KeyValue.KVComparator comparator =
+    CellComparator comparator =
         store != null ? store.getComparator() : null;

     int count = 0;
@@ -687,7 +687,7 @@ public class StoreScanner extends NonReversedNonLazyKeyValueScanner
    * @throws IOException
    */
   protected void checkScanOrder(Cell prevKV, Cell kv,
-      KeyValue.KVComparator comparator) throws IOException {
+      CellComparator comparator) throws IOException {
     // Check that the heap gives us KVs in an increasing order.
     assert prevKV == null || comparator == null
        || comparator.compare(prevKV, kv) <= 0 : "Key " + prevKV
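Both checkScanOrder overrides reduce to one invariant: the forward scanner must emit cells in non-decreasing comparator order, while the reversed scanner lets rows decrease but keeps cells within a row increasing. The forward check as a reusable guard, a sketch with hypothetical names (the real code uses a Java assert rather than throwing):

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;

final class ScanOrderCheck {
  // Rejects a cell that sorts before the previously emitted one; a null
  // comparator (no Store available) disables the check, as in StoreScanner.
  static void checkForwardOrder(Cell prev, Cell next, CellComparator cmp) throws IOException {
    if (prev != null && cmp != null && cmp.compare(prev, next) > 0) {
      throw new IOException("Key " + prev + " followed by a smaller key " + next);
    }
  }
}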
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
index 891bb6b..8021da8 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java
@@ -25,8 +25,8 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
 import org.apache.hadoop.hbase.util.Bytes;

@@ -38,7 +38,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink {
   /** Factory that is used to produce single StoreFile.Writer-s */
   protected WriterFactory writerFactory;
-  protected KVComparator comparator;
+  protected CellComparator comparator;

   protected List<StoreFile.Writer> existingWriters;
   protected List<byte[]> boundaries;
@@ -58,7 +58,7 @@ public abstract class StripeMultiFileWriter implements Compactor.CellSink {
    * @param factory Factory used to produce individual file writers.
    * @param comparator Comparator used to compare rows.
    */
-  public void init(StoreScanner sourceScanner, WriterFactory factory, KVComparator comparator)
+  public void init(StoreScanner sourceScanner, WriterFactory factory, CellComparator comparator)
      throws IOException {
    this.writerFactory = factory;
    this.sourceScanner = sourceScanner;

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
index fe54a93..41a3c02 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java
@@ -26,7 +26,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
@@ -55,7 +55,7 @@ public class StripeStoreEngine extends StoreEngine

       List<Tag> newTags = new ArrayList<Tag>();
       Tag replayTag = new Tag(TagType.LOG_REPLAY_TAG_TYPE, Bytes.toBytes(seqId));
       newTags.add(replayTag);
-      return KeyValue.cloneAndAddTags(cell, newTags);
+      return CellUtil.cloneAndAddTags(cell, newTags);
     }
     return cell;
   }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
index 93c33ad..831a283 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.BaseDecoder;
 import org.apache.hadoop.hbase.codec.BaseEncoder;
 import org.apache.hadoop.hbase.codec.Codec;
@@ -342,7 +343,7 @@ public class WALCellCodec implements Codec {
       if (!(cell instanceof KeyValue)) throw new IOException("Cannot write non-KV cells to WAL");
       checkFlushed();
       // Make sure to write tags into WAL
-      KeyValue.oswrite((KeyValue) cell, this.out, true);
+      KeyValueUtil.oswrite((KeyValue) cell, this.out, true);
     }
   }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
index 9ba3353..e6479f1 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -172,7 +173,7 @@ public class WALEdit implements Writable, HeapSize {
       if (compressionContext != null) {
         this.add(KeyValueCompression.readKV(in, compressionContext));
       } else {
-        this.add(KeyValue.create(in));
+        this.add(KeyValueUtil.create(in));
       }
     }
     int numFamilies = in.readInt();
@@ -189,7 +190,7 @@ public class WALEdit implements Writable, HeapSize {
     } else {
       // this is an old style HLog entry. The int that we just
       // read is actually the length of a single KeyValue
-      this.add(KeyValue.create(versionOrLength, in));
+      this.add(KeyValueUtil.create(versionOrLength, in));
     }
   }

@@ -203,7 +204,7 @@ public class WALEdit implements Writable, HeapSize {
       if (compressionContext != null) {
         KeyValueCompression.writeKV(out, kv, compressionContext);
       } else {
-        KeyValue.write(kv, out);
+        KeyValueUtil.write(kv, out);
       }
     }
     if (scopes == null) {
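The log-replay fragment above shows the pattern for stamping a replay sequence id onto a cell: build a Tag of type LOG_REPLAY_TAG_TYPE and clone the cell with the tag appended, since cells are effectively immutable. Restated as a small helper (a sketch mirroring the fragment; cloneAndAddTags is the CellUtil home this patch gives the old KeyValue helper):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.util.Bytes;

final class ReplayTagSketch {
  /** Returns a copy of cell carrying a LOG_REPLAY tag holding the given seqId. */
  static Cell tagForReplay(Cell cell, long seqId) {
    List<Tag> newTags = new ArrayList<Tag>(1);
    newTags.add(new Tag(TagType.LOG_REPLAY_TAG_TYPE, Bytes.toBytes(seqId)));
    return CellUtil.cloneAndAddTags(cell, newTags);
  }
}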
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java
index 7d00d13..dc062b2 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.util;

 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.CellComparator;

 /**
  * Common methods Bloom filter methods required at read and write time.
@@ -52,6 +52,6 @@ public interface BloomFilterBase {
   /**
    * @return Bloom key comparator
    */
-  KVComparator getComparator();
+  CellComparator getComparator();
 }

diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
index 72751d8..c035e54 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
@@ -26,6 +26,7 @@ import java.util.SortedSet;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner;

@@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner;
 @InterfaceAudience.Private
 public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   final private Iterable<KeyValue> data;
-  final KeyValue.KVComparator comparator;
+  final CellComparator comparator;
   private Iterator<KeyValue> iter;
   private KeyValue current;

@@ -45,7 +46,7 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   }

   public CollectionBackedScanner(SortedSet<KeyValue> set,
-      KeyValue.KVComparator comparator) {
+      CellComparator comparator) {
     this.comparator = comparator;
     data = set;
     init();
@@ -56,14 +57,14 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
   }

   public CollectionBackedScanner(List<KeyValue> list,
-      KeyValue.KVComparator comparator) {
+      CellComparator comparator) {
     Collections.sort(list, comparator);
     this.comparator = comparator;
     data = list;
     init();
   }

-  public CollectionBackedScanner(KeyValue.KVComparator comparator,
+  public CollectionBackedScanner(CellComparator comparator,
       KeyValue... array) {
     this.comparator = comparator;
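CollectionBackedScanner is the testing workhorse behind several of the changes that follow: hand it cells plus a comparator and it behaves like a store file scanner. A sketch of typical use with the patched signatures (the List constructor sorts with the supplied comparator, as the diff above shows, so input order does not matter):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;

final class ScannerFixtureSketch {
  static CollectionBackedScanner scannerOver(CellComparator comparator, List<KeyValue> kvs) {
    // Copy first: the constructor sorts the list in place.
    return new CollectionBackedScanner(new ArrayList<KeyValue>(kvs), comparator);
  }
}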
hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index efcd7cd..6f553bc 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.RegionLocations; import org.apache.hadoop.hbase.ServerName; @@ -81,10 +82,10 @@ import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.MetaScanner; -import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor; import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.io.hfile.CacheConfig; @@ -660,9 +661,9 @@ public class HBaseFsck extends Configured { CacheConfig cacheConf = new CacheConfig(getConf()); hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf()); hf.loadFileInfo(); - KeyValue startKv = KeyValue.createKeyValueFromKey(hf.getFirstKey()); + KeyValue startKv = KeyValueUtil.createKeyValueFromKey(hf.getFirstKey()); start = startKv.getRow(); - KeyValue endKv = KeyValue.createKeyValueFromKey(hf.getLastKey()); + KeyValue endKv = KeyValueUtil.createKeyValueFromKey(hf.getLastKey()); end = endKv.getRow(); } catch (IOException ioe) { LOG.warn("Problem reading orphan file " + hfile + ", skipping"); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java index b46ec28..5b88398 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java @@ -67,12 +67,12 @@ public class TestSerialization { KeyValue kv = new KeyValue(row, fam, qf, ts, val); ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); - long l = KeyValue.write(kv, dos); + long l = KeyValueUtil.write(kv, dos); dos.close(); byte [] mb = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(mb); DataInputStream dis = new DataInputStream(bais); - KeyValue deserializedKv = KeyValue.create(dis); + KeyValue deserializedKv = KeyValueUtil.create(dis); assertTrue(Bytes.equals(kv.getBuffer(), deserializedKv.getBuffer())); assertEquals(kv.getOffset(), deserializedKv.getOffset()); assertEquals(kv.getLength(), deserializedKv.getLength()); @@ -91,8 +91,8 @@ public class TestSerialization { long l = 0; try { - l = KeyValue.oswrite(kv_0, dos, false); - l += KeyValue.oswrite(kv_1, dos, false); + l = KeyValueUtil.oswrite(kv_0, dos, false); + l += KeyValueUtil.oswrite(kv_1, dos, false); assertEquals(100L, l); } catch (IOException e) { fail("Unexpected IOException" + e.getMessage()); @@ -102,7 +102,7 @@ public class TestSerialization { DataInputStream dis = new DataInputStream(bais); try { - KeyValue.create(dis); + KeyValueUtil.create(dis); 
assertTrue(kv_0.equals(kv_1)); } catch (Exception e) { fail("Unexpected Exception" + e.getMessage()); @@ -111,7 +111,7 @@ public class TestSerialization { // length -1 try { // even if we have a good kv now in dis we will just pass length with -1 for simplicity - KeyValue.create(-1, dis); + KeyValueUtil.create(-1, dis); fail("Expected corrupt stream"); } catch (Exception e) { assertEquals("Failed read -1 bytes, stream corrupt?", e.getMessage()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java index 97d08ac..b971fcf 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java @@ -30,11 +30,9 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.SmallTests; -import org.apache.hadoop.hbase.codec.Codec; -import org.apache.hadoop.hbase.codec.MessageCodec; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -108,13 +106,13 @@ public class TestCellMessageCodec { Codec.Decoder decoder = cmc.getDecoder(dis); assertTrue(decoder.advance()); Cell c = decoder.current(); - assertTrue(CellComparator.equals(c, kv1)); + assertTrue(CellUtil.equals(c, kv1)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv2)); + assertTrue(CellUtil.equals(c, kv2)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellComparator.equals(c, kv3)); + assertTrue(CellUtil.equals(c, kv3)); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java index eb0023e..81e7404 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java @@ -100,7 +100,7 @@ public class TestHalfStoreFileReader { HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); r.loadFileInfo(); byte [] midkey = r.midkey(); - KeyValue midKV = KeyValue.createKeyValueFromKey(midkey); + KeyValue midKV = KeyValueUtil.createKeyValueFromKey(midkey); midkey = midKV.getRow(); //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey)); @@ -166,7 +166,7 @@ public class TestHalfStoreFileReader { HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); r.loadFileInfo(); byte[] midkey = r.midkey(); - KeyValue midKV = KeyValue.createKeyValueFromKey(midkey); + KeyValue midKV = KeyValueUtil.createKeyValueFromKey(midkey); midkey = midKV.getRow(); Reference bottom = new Reference(midkey, Reference.Range.bottom); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java index 4d17650..2322680 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java @@ -18,6 
+18,10 @@ */ package org.apache.hadoop.hbase.io.hfile; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -27,7 +31,15 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.hadoop.hbase.*; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; import org.junit.Test; @@ -36,15 +48,6 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import static org.junit.Assert.*; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; - @RunWith(Parameterized.class) @Category(SmallTests.class) public class TestFixedFileTrailer { @@ -94,7 +97,7 @@ public class TestFixedFileTrailer { t.setLastDataBlockOffset(291); t.setNumDataIndexLevels(3); - t.setComparatorClass(KeyValue.COMPARATOR.getClass()); + t.setComparatorClass(((KeyValue.KVComparator)KeyValue.COMPARATOR).getClass()); t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic. t.setUncompressedDataIndexSize(827398717L); // Something random. @@ -175,7 +178,7 @@ public class TestFixedFileTrailer { t.setEntryCount(((long) Integer.MAX_VALUE) + 1); t.setLastDataBlockOffset(291); t.setNumDataIndexLevels(3); - t.setComparatorClass(KeyValue.COMPARATOR.getClass()); + t.setComparatorClass(((KeyValue.KVComparator)KeyValue.COMPARATOR).getClass()); t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic. t.setUncompressedDataIndexSize(827398717L); // Something random. t.setLoadOnOpenOffset(128); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index ef9a74f..dbb7061 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.io.compress.Compression; @@ -260,18 +261,18 @@ public class TestHFile extends HBaseTestCase { // Align scanner at start of the file. 
scanner.seekTo(); readAllRecords(scanner); - int seekTo = scanner.seekTo(KeyValue.createKeyValueFromKey(getSomeKey(50))); + int seekTo = scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))); System.out.println(seekTo); assertTrue("location lookup failed", - scanner.seekTo(KeyValue.createKeyValueFromKey(getSomeKey(50))) == 0); + scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))) == 0); // read the key and see if it matches ByteBuffer readKey = scanner.getKey(); assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey))); - scanner.seekTo(KeyValue.createKeyValueFromKey(getSomeKey(0))); + scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0))); ByteBuffer val1 = scanner.getValue(); - scanner.seekTo(KeyValue.createKeyValueFromKey(getSomeKey(0))); + scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0))); ByteBuffer val2 = scanner.getValue(); assertTrue(Arrays.equals(Bytes.toBytes(val1), Bytes.toBytes(val2))); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index f3e2f51..c082895 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -635,7 +635,7 @@ public class TestHFileBlockIndex { private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i) throws IOException { assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0, - scanner.seekTo(KeyValue.createKeyValueFromKey(keys[i]))); + scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i]))); } private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr, diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java index 23334ac..ca80eea 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java @@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; @@ -81,7 +81,7 @@ public class TestHFileInlineToRootChunkConversion { HFileReaderV2 reader = (HFileReaderV2) HFile.createReader(fs, hfPath, cacheConf, conf); HFileScanner scanner = reader.getScanner(true, true); for (int i = 0; i < keys.size(); ++i) { - scanner.seekTo(KeyValue.createKeyValueFromKey(keys.get(i))); + scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys.get(i))); } reader.close(); } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java index 7310525..9bc0d77 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java @@ -42,6 +42,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; import 
org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; @@ -193,7 +194,7 @@ public class TestHFileSeek extends TestCase { kSampler.next(key); byte [] k = new byte [key.getLength()]; System.arraycopy(key.getBytes(), 0, k, 0, key.getLength()); - if (scanner.seekTo(KeyValue.createKeyValueFromKey(k)) >= 0) { + if (scanner.seekTo(KeyValueUtil.createKeyValueFromKey(k)) >= 0) { ByteBuffer bbkey = scanner.getKey(); ByteBuffer bbval = scanner.getValue(); totalBytes += bbkey.limit(); diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index 8b92c56..57b282d 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -123,7 +123,7 @@ public class TestHFileWriterV3 { new HFileWriterV3.WriterFactoryV3(conf, new CacheConfig(conf)) .withPath(fs, hfilePath) .withFileContext(context) - .withComparator(KeyValue.COMPARATOR) + .withComparator((KVComparator)KeyValue.COMPARATOR) .create(); Random rand = new Random(9713312); // Just a fixed seed. diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java index 596387a..0f09ff4 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java @@ -29,6 +29,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.util.Bytes; @@ -63,7 +64,7 @@ public class TestReseekTo { .withOutputStream(fout) .withFileContext(context) // NOTE: This test is dependent on this deprecated nonstandard comparator - .withComparator(KeyValue.COMPARATOR) + .withComparator((KVComparator)KeyValue.COMPARATOR) .create(); int numberOfKeys = 1000; diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index 9297e64..d6f9c23 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.Tag; @@ -82,7 +83,7 @@ public class TestSeekTo extends HBaseTestCase { .withIncludesTags(true).build(); HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout) .withFileContext(context) - .withComparator(KeyValue.COMPARATOR).create(); + .withComparator((KVComparator)KeyValue.COMPARATOR).create(); // 4 bytes * 3 * 2 for each key/value + // 3 for keys, 15 for values = 42 (woot) 
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
index 53c55be..5fd5f4a 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
@@ -19,13 +19,13 @@
 
 package org.apache.hadoop.hbase.regionserver;
 
-import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-import org.apache.hadoop.hbase.util.CollectionBackedScanner;
-import org.apache.hadoop.hbase.KeyValue;
-
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.CollectionBackedScanner;
+
 /**
  * A fixture that implements and presents a KeyValueScanner.
  * It takes a list of key/values which is then sorted according
@@ -33,7 +33,7 @@ import java.util.List;
  * to be a store file scanner.
  */
 public class KeyValueScanFixture extends CollectionBackedScanner {
-  public KeyValueScanFixture(KeyValue.KVComparator comparator,
+  public KeyValueScanFixture(CellComparator comparator,
                              KeyValue... incData) {
     super(comparator, incData);
   }
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
index 3a12674..f99226f 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
@@ -23,8 +23,8 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -54,7 +54,7 @@ public class MockStoreFile extends StoreFile {
   }
 
   @Override
-  byte[] getFileSplitPoint(KVComparator comparator) throws IOException {
+  byte[] getFileSplitPoint(CellComparator comparator) throws IOException {
     return this.splitPoint;
   }
 
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 08be35e..047d3b5 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -353,8 +354,8 @@ public class TestCompoundBloomFilter {
         row, 0, 0);
     byte[] rowColKey = cbfb.createBloomKey(row, 0, row.length, qualifier, 0, qualifier.length);
-    KeyValue rowKV = KeyValue.createKeyValueFromKey(rowKey);
-    KeyValue rowColKV = KeyValue.createKeyValueFromKey(rowColKey);
+    KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);
+    KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);
     assertEquals(rowKV.getTimestamp(),
         rowColKV.getTimestamp());
     assertEquals(Bytes.toStringBinary(rowKV.getRow()),
         Bytes.toStringBinary(rowColKV.getRow()));
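Reviewer sketch: the fixture and mock hunks widen comparator parameters from KeyValue.KVComparator to CellComparator, while TestCompoundBloomFilter picks up the relocated factory, createKeyValueFromKey now being hosted on KeyValueUtil. A condensed, hypothetical helper showing the migrated call (behavior assumed unchanged by the move):

    // Rebuild a KeyValue from a serialized flat key;
    // was KeyValue.createKeyValueFromKey(key) before this patch.
    static KeyValue fromSerializedKey(byte[] key) {
      return KeyValueUtil.createKeyValueFromKey(key);
    }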
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index de7c5a5..972f394 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -68,7 +68,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
@@ -2572,7 +2571,7 @@ public class TestHRegion {
       res = new ArrayList();
       is.next(res);
       for (int i = 0; i < res.size(); i++) {
-        assertTrue(CellComparator.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
+        assertTrue(CellUtil.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
       }
 
       // Result 2
@@ -2583,7 +2582,7 @@
       res = new ArrayList();
       is.next(res);
       for (int i = 0; i < res.size(); i++) {
-        assertTrue(CellComparator.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
+        assertTrue(CellUtil.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
       }
     } finally {
       HRegion.closeHRegion(this.region);
@@ -2705,7 +2704,7 @@
 
       // Verify result
       for (int i = 0; i < expected.size(); i++) {
-        assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
+        assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
       }
     } finally {
       HRegion.closeHRegion(this.region);
@@ -2787,7 +2786,7 @@
 
      // Verify result
      for (int i = 0; i < expected.size(); i++) {
-       assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
+       assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
      }
    } finally {
      HRegion.closeHRegion(this.region);
@@ -2909,7 +2908,7 @@
 
      // Verify result
      for (int i = 0; i < expected.size(); i++) {
-       assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
+       assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
      }
    } finally {
      HRegion.closeHRegion(this.region);
@@ -3036,7 +3035,7 @@
 
      // Verify result
      for (int i = 0; i < expected.size(); i++) {
-       assertTrue(CellComparator.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
+       assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
      }
    } finally {
      HRegion.closeHRegion(this.region);
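Reviewer sketch: every TestHRegion hunk is the same one-token move, equalsIgnoreMvccVersion relocating from CellComparator to CellUtil. The verification loops the hunks touch all reduce to the shape below (a sketch only; expected and actual are assumed populated by the test, and the mvcc version is deliberately ignored because the region may rewrite it across flushes):

    static void assertCellsEqualIgnoringMvcc(List<Cell> expected, List<Cell> actual) {
      assertEquals(expected.size(), actual.size());
      for (int i = 0; i < expected.size(); i++) {
        assertTrue(CellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
      }
    }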
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java
index f31b155..454f71c 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java
@@ -25,6 +25,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SmallTests;
@@ -45,7 +46,7 @@ public class TestKeyValueScanFixture extends TestCase {
             10, KeyValue.Type.Put, "value-10")
     };
     KeyValueScanner scan = new KeyValueScanFixture(
-        KeyValue.COMPARATOR, kvs);
+        (KVComparator)KeyValue.COMPARATOR, kvs);
 
     KeyValue kv = KeyValueUtil.createFirstOnRow(Bytes.toBytes("RowA"));
     // should seek to this:
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index b6242df..04fd94f 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -38,7 +38,6 @@ import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -265,7 +264,7 @@ public class TestMultiColumnScanner {
       assertTrue("Scanner returned additional key/value: " + kv + ", "
          + queryInfo + deleteInfo + ";", kvPos < kvs.size());
       assertTrue("Scanner returned wrong key/value; " + queryInfo
-          + deleteInfo + ";", CellComparator.equalsIgnoreMvccVersion(kvs.get(kvPos), (kv)));
+          + deleteInfo + ";", CellUtil.equalsIgnoreMvccVersion(kvs.get(kvPos), (kv)));
       ++kvPos;
       ++numResults;
     }
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
index 0465b93..e497859 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
@@ -27,10 +27,10 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.NavigableSet;
 
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Get;
@@ -60,7 +60,7 @@ public class TestQueryMatcher extends HBaseTestCase {
   private Get get;
 
   long ttl = Long.MAX_VALUE;
-  KVComparator rowComparator;
+  CellComparator rowComparator;
   private Scan scan;
 
   public void setUp() throws Exception {
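Reviewer sketch: TestQueryMatcher shows the declaration-side half of the migration. Assuming KeyValue.COMPARATOR is now exposed as a CellComparator (which the casts elsewhere in this diff imply), fields formerly typed KVComparator can retype without any cast:

    // No cast needed in this direction: the constant widens to the new type.
    CellComparator rowComparator = KeyValue.COMPARATOR;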
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 99834ed..054b9fd 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -19,7 +19,9 @@
 
 package org.apache.hadoop.hbase.regionserver;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
@@ -48,6 +50,7 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -55,7 +58,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.TableName;
@@ -963,7 +965,7 @@ public class TestStore {
     public static DefaultCompactor lastCreatedCompactor = null;
     @Override
     protected void createComponents(
-        Configuration conf, Store store, KVComparator comparator) throws IOException {
+        Configuration conf, Store store, CellComparator comparator) throws IOException {
       super.createComponents(conf, store, comparator);
       lastCreatedCompactor = this.compactor;
     }
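Reviewer sketch: the TestStore hunk adapts a DefaultStoreEngine subclass to the retyped hook, createComponents now receiving a CellComparator. A minimal, hypothetical override under that assumption (class name invented for illustration):

    public static class RecordingStoreEngine extends DefaultStoreEngine {
      @Override
      protected void createComponents(Configuration conf, Store store,
          CellComparator comparator) throws IOException { // was: KVComparator
        super.createComponents(conf, store, comparator);
        // Subclasses can observe what was built here, as the test's engine does
        // when it records the compactor it created.
      }
    }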
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index 115926e..60f5d5b 100644
--- hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -164,9 +164,9 @@ public class TestStoreFile extends HBaseTestCase {
     // Split on a row, not in middle of row.  Midkey returned by reader
     // may be in middle of row.  Create new one with empty column and
     // timestamp.
-    KeyValue kv = KeyValue.createKeyValueFromKey(reader.midkey());
+    KeyValue kv = KeyValueUtil.createKeyValueFromKey(reader.midkey());
     byte [] midRow = kv.getRow();
-    kv = KeyValue.createKeyValueFromKey(reader.getLastKey());
+    kv = KeyValueUtil.createKeyValueFromKey(reader.getLastKey());
     byte [] finalRow = kv.getRow();
     // Make a reference
     HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow);
@@ -178,7 +178,7 @@
     HFileScanner s = refHsf.createReader().getScanner(false, false);
     for(boolean first = true; (!s.isSeeked() && s.seekTo()) || s.next();) {
       ByteBuffer bb = s.getKey();
-      kv = KeyValue.createKeyValueFromKey(bb);
+      kv = KeyValueUtil.createKeyValueFromKey(bb);
       if (first) {
         assertTrue(Bytes.equals(kv.getRow(), midRow));
         first = false;
@@ -307,7 +307,7 @@
   private void checkHalfHFile(final HRegionFileSystem regionFs,
       final StoreFile f) throws IOException {
     byte [] midkey = f.createReader().midkey();
-    KeyValue midKV = KeyValue.createKeyValueFromKey(midkey);
+    KeyValue midKV = KeyValueUtil.createKeyValueFromKey(midkey);
     byte [] midRow = midKV.getRow();
     // Create top split.
     HRegionInfo topHri = new HRegionInfo(regionFs.getRegionInfo().getTable(),
@@ -390,7 +390,7 @@
           key.arrayOffset(), key.limit(), badmidkey, 0, badmidkey.length) >= 0);
       if (first) {
         first = false;
-        KeyValue keyKV = KeyValue.createKeyValueFromKey(key);
+        KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
         LOG.info("First top when key < bottom: " + keyKV);
         String tmp = Bytes.toString(keyKV.getRow());
         for (int i = 0; i < tmp.length(); i++) {
@@ -398,7 +398,7 @@
           }
         }
       }
-      KeyValue keyKV = KeyValue.createKeyValueFromKey(key);
+      KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
       LOG.info("Last top when key < bottom: " + keyKV);
       String tmp = Bytes.toString(keyKV.getRow());
       for (int i = 0; i < tmp.length(); i++) {
@@ -422,7 +422,7 @@
       key = bottomScanner.getKey();
       if (first) {
         first = false;
-        keyKV = KeyValue.createKeyValueFromKey(key);
+        keyKV = KeyValueUtil.createKeyValueFromKey(key);
         LOG.info("First bottom when key > top: " + keyKV);
         tmp = Bytes.toString(keyKV.getRow());
         for (int i = 0; i < tmp.length(); i++) {
@@ -430,7 +430,7 @@
           }
         }
       }
-      keyKV = KeyValue.createKeyValueFromKey(key);
+      keyKV = KeyValueUtil.createKeyValueFromKey(key);
       LOG.info("Last bottom when key > top: " + keyKV);
       for (int i = 0; i < tmp.length(); i++) {
         assertTrue(Bytes.toString(keyKV.getRow()).charAt(i) == 'z');