diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index 3aaac36268..26d1d0ff5b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -36,8 +37,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce * its quota of columns, {@link #filterAllRemaining()} returns true. This * makes this filter unsuitable as a Scan filter. */ -@InterfaceAudience.Public -public class ColumnCountGetFilter extends FilterBase { +@InterfaceAudience.Public public class ColumnCountGetFilter extends FilterBase { private int limit = 0; private int count = 0; @@ -50,38 +50,34 @@ public class ColumnCountGetFilter extends FilterBase { return limit; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. - if (filterAllRemaining()) return true; + if (filterAllRemaining()) { + return true; + } return false; } - @Override - public boolean filterAllRemaining() { + @Override public boolean filterAllRemaining() { return this.count > this.limit; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { this.count++; return filterAllRemaining() ? 
ReturnCode.NEXT_COL : ReturnCode.INCLUDE_AND_NEXT_COL; } - @Override - public void reset() { + @Override public void reset() { this.count = 0; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { - Preconditions.checkArgument(filterArguments.size() == 1, - "Expected 1 but got: %s", filterArguments.size()); + public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { + Preconditions.checkArgument(filterArguments.size() == 1, "Expected 1 but got: %s", + filterArguments.size()); int limit = ParseFilter.convertByteArrayToInt(filterArguments.get(0)); return new ColumnCountGetFilter(limit); } @@ -89,10 +85,9 @@ public class ColumnCountGetFilter extends FilterBase { /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { + @Override public byte[] toByteArray() { FilterProtos.ColumnCountGetFilter.Builder builder = - FilterProtos.ColumnCountGetFilter.newBuilder(); + FilterProtos.ColumnCountGetFilter.newBuilder(); builder.setLimit(this.limit); return builder.build().toByteArray(); } @@ -103,8 +98,8 @@ public class ColumnCountGetFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static ColumnCountGetFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnCountGetFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.ColumnCountGetFilter proto; try { proto = FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes); @@ -119,17 +114,32 @@ public class ColumnCountGetFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof ColumnCountGetFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof ColumnCountGetFilter)) { + return false; + } - ColumnCountGetFilter other = (ColumnCountGetFilter)o; + ColumnCountGetFilter other = (ColumnCountGetFilter) o; return this.getLimit() == other.getLimit(); } - @Override - public String toString() { + @Override public String toString() { return this.getClass().getSimpleName() + " " + this.limit; } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnCountGetFilter))) { + return false; + } + ColumnCountGetFilter f = (ColumnCountGetFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.limit); + } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index c90047da8b..4f592e98ac 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -230,4 +231,18 @@ public class ColumnPaginationFilter extends FilterBase { return String.format("%s (%d, %d)", this.getClass().getSimpleName(), this.limit, this.offset); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnPaginationFilter))) { + return false; + } + ColumnPaginationFilter f = (ColumnPaginationFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public 
int hashCode() { + return Objects.hash(this.limit, this.offset); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index a403a40425..6c31f5c7a7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; @@ -40,11 +41,10 @@ import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; * a particular prefix. For example, if prefix is 'an', it will pass keys with * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'. */ -@InterfaceAudience.Public -public class ColumnPrefixFilter extends FilterBase { - protected byte [] prefix = null; +@InterfaceAudience.Public public class ColumnPrefixFilter extends FilterBase { + protected byte[] prefix = null; - public ColumnPrefixFilter(final byte [] prefix) { + public ColumnPrefixFilter(final byte[] prefix) { this.prefix = prefix; } @@ -52,20 +52,16 @@ public class ColumnPrefixFilter extends FilterBase { return prefix; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. 
return false; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell cell) { + @Override public ReturnCode filterCell(final Cell cell) { if (this.prefix == null) { return ReturnCode.INCLUDE; } else { @@ -99,25 +95,25 @@ public class ColumnPrefixFilter extends FilterBase { return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) cell).getQualifierByteBuffer(), ((ByteBufferExtendedCell) cell).getQualifierPosition(), length, prefix, 0, length); } - return Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), length, prefix, 0, - length); + return Bytes + .compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), length, prefix, 0, length); } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - Preconditions.checkArgument(filterArguments.size() == 1, - "Expected 1 but got: %s", filterArguments.size()); - byte [] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + Preconditions.checkArgument(filterArguments.size() == 1, "Expected 1 but got: %s", + filterArguments.size()); + byte[] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); return new ColumnPrefixFilter(columnPrefix); } /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { - FilterProtos.ColumnPrefixFilter.Builder builder = - FilterProtos.ColumnPrefixFilter.newBuilder(); - if (this.prefix != null) builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix)); + @Override public byte[] toByteArray() { + FilterProtos.ColumnPrefixFilter.Builder builder = FilterProtos.ColumnPrefixFilter.newBuilder(); + if (this.prefix != null) { + builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix)); + } return 
builder.build().toByteArray(); } @@ -127,8 +123,7 @@ public class ColumnPrefixFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static ColumnPrefixFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnPrefixFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.ColumnPrefixFilter proto; try { proto = FilterProtos.ColumnPrefixFilter.parseFrom(pbBytes); @@ -143,22 +138,35 @@ public class ColumnPrefixFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof ColumnPrefixFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof ColumnPrefixFilter)) { + return false; + } - ColumnPrefixFilter other = (ColumnPrefixFilter)o; + ColumnPrefixFilter other = (ColumnPrefixFilter) o; return Bytes.equals(this.getPrefix(), other.getPrefix()); } - @Override - public Cell getNextCellHint(Cell cell) { + @Override public Cell getNextCellHint(Cell cell) { return PrivateCellUtil.createFirstOnRowCol(cell, prefix, 0, prefix.length); } - @Override - public String toString() { + @Override public String toString() { return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnPrefixFilter))) { + return false; + } + ColumnPrefixFilter f = (ColumnPrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getPrefix())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index f981ed75e4..829284f54b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -23,6 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.len; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -41,15 +42,14 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; * between minColumn to maxColumn. For example, if minColumn is 'an', and * maxColumn is 'be', it will pass keys with columns like 'ana', 'bad', but not * keys with columns like 'bed', 'eye' - * + *

* If minColumn is null, there is no lower bound. If maxColumn is null, there is * no upper bound. - * + *

* minColumnInclusive and maxColumnInclusive specify if the ranges are inclusive * or not. */ -@InterfaceAudience.Public -public class ColumnRangeFilter extends FilterBase { +@InterfaceAudience.Public public class ColumnRangeFilter extends FilterBase { protected byte[] minColumn = null; protected boolean minColumnInclusive = true; protected byte[] maxColumn = null; @@ -58,12 +58,13 @@ public class ColumnRangeFilter extends FilterBase { /** * Create a filter to select those keys with columns that are between minColumn * and maxColumn. - * @param minColumn minimum value for the column range. If if it's null, - * there is no lower bound. + * + * @param minColumn minimum value for the column range. If it's null, + * there is no lower bound. * @param minColumnInclusive if true, include minColumn in the range. - * @param maxColumn maximum value for the column range. If it's null, + * @param maxColumn maximum value for the column range. If it's null, * @param maxColumnInclusive if true, include maxColumn in the range. - * there is no upper bound. + * there is no upper bound. */ public ColumnRangeFilter(final byte[] minColumn, boolean minColumnInclusive, final byte[] maxColumn, boolean maxColumnInclusive) { @@ -115,20 +116,16 @@ public class ColumnRangeFilter extends FilterBase { return this.maxColumnInclusive; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. 
return false; } - @Override - @Deprecated - public ReturnCode filterKeyValue(final Cell c) { + @Override @Deprecated public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { int cmpMin = 1; if (this.minColumn != null) { @@ -156,34 +153,33 @@ public class ColumnRangeFilter extends FilterBase { return ReturnCode.NEXT_ROW; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - Preconditions.checkArgument(filterArguments.size() == 4, - "Expected 4 but got: %s", filterArguments.size()); - byte [] minColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + Preconditions.checkArgument(filterArguments.size() == 4, "Expected 4 but got: %s", + filterArguments.size()); + byte[] minColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); boolean minColumnInclusive = ParseFilter.convertByteArrayToBoolean(filterArguments.get(1)); - byte [] maxColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(2)); + byte[] maxColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(2)); boolean maxColumnInclusive = ParseFilter.convertByteArrayToBoolean(filterArguments.get(3)); if (minColumn.length == 0) minColumn = null; if (maxColumn.length == 0) maxColumn = null; - return new ColumnRangeFilter(minColumn, minColumnInclusive, - maxColumn, maxColumnInclusive); + return new ColumnRangeFilter(minColumn, minColumnInclusive, maxColumn, maxColumnInclusive); } /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { - FilterProtos.ColumnRangeFilter.Builder builder = - FilterProtos.ColumnRangeFilter.newBuilder(); - if (this.minColumn != null) builder.setMinColumn( - UnsafeByteOperations.unsafeWrap(this.minColumn)); + @Override public byte[] toByteArray() { + 
FilterProtos.ColumnRangeFilter.Builder builder = FilterProtos.ColumnRangeFilter.newBuilder(); + if (this.minColumn != null) { + builder.setMinColumn(UnsafeByteOperations.unsafeWrap(this.minColumn)); + } builder.setMinColumnInclusive(this.minColumnInclusive); - if (this.maxColumn != null) builder.setMaxColumn( - UnsafeByteOperations.unsafeWrap(this.maxColumn)); + if (this.maxColumn != null) { + builder.setMaxColumn(UnsafeByteOperations.unsafeWrap(this.maxColumn)); + } builder.setMaxColumnInclusive(this.maxColumnInclusive); return builder.build().toByteArray(); } @@ -194,26 +190,25 @@ public class ColumnRangeFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static ColumnRangeFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnRangeFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.ColumnRangeFilter proto; try { proto = FilterProtos.ColumnRangeFilter.parseFrom(pbBytes); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } - return new ColumnRangeFilter(proto.hasMinColumn()?proto.getMinColumn().toByteArray():null, - proto.getMinColumnInclusive(),proto.hasMaxColumn()?proto.getMaxColumn().toByteArray():null, - proto.getMaxColumnInclusive()); + return new ColumnRangeFilter(proto.hasMinColumn() ? proto.getMinColumn().toByteArray() : null, + proto.getMinColumnInclusive(), + proto.hasMaxColumn() ? proto.getMaxColumn().toByteArray() : null, + proto.getMaxColumnInclusive()); } /** * @param o filter to serialize. * @return true if and only if the fields of the filter that are serialized are equal to the - * corresponding fields in other. Used for testing. + * corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) { return true; } @@ -222,21 +217,31 @@ public class ColumnRangeFilter extends FilterBase { } ColumnRangeFilter other = (ColumnRangeFilter) o; return Bytes.equals(this.getMinColumn(), other.getMinColumn()) - && this.getMinColumnInclusive() == other.getMinColumnInclusive() - && Bytes.equals(this.getMaxColumn(), other.getMaxColumn()) - && this.getMaxColumnInclusive() == other.getMaxColumnInclusive(); + && this.getMinColumnInclusive() == other.getMinColumnInclusive() && Bytes + .equals(this.getMaxColumn(), other.getMaxColumn()) && this.getMaxColumnInclusive() == other + .getMaxColumnInclusive(); } - @Override - public Cell getNextCellHint(Cell cell) { + @Override public Cell getNextCellHint(Cell cell) { return PrivateCellUtil.createFirstOnRowCol(cell, this.minColumn, 0, len(this.minColumn)); } - @Override - public String toString() { - return this.getClass().getSimpleName() + " " - + (this.minColumnInclusive ? "[" : "(") + Bytes.toStringBinary(this.minColumn) - + ", " + Bytes.toStringBinary(this.maxColumn) + @Override public String toString() { + return this.getClass().getSimpleName() + " " + (this.minColumnInclusive ? "[" : "(") + Bytes + .toStringBinary(this.minColumn) + ", " + Bytes.toStringBinary(this.maxColumn) + (this.maxColumnInclusive ? 
"]" : ")"); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnRangeFilter))) { + return false; + } + ColumnRangeFilter f = (ColumnRangeFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(Bytes.toStringBinary(this.getMinColumn()), this.getMinColumnInclusive(), + Bytes.toStringBinary(this.getMaxColumn()), this.getMaxColumnInclusive()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java index 07951658f8..9e61dba819 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -238,4 +239,18 @@ public class ColumnValueFilter extends FilterBase { Bytes.toStringBinary(this.qualifier), this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnValueFilter))) { + return false; + } + ColumnValueFilter f = (ColumnValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getFamily()), Bytes.hashCode(this.getQualifier()) + , this.getCompareOperator(), this.getComparator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index 50924ec631..73c60fd646 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; @@ -321,4 +322,18 @@ public abstract class CompareFilter extends FilterBase { this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof CompareFilter))) { + return false; + } + CompareFilter f = (CompareFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index 8f5dee7924..7d8dd60d8d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; import org.apache.hadoop.hbase.Cell; @@ -311,4 +312,18 @@ public class DependentColumnFilter extends CompareFilter { this.op.name(), this.comparator != null ? 
Bytes.toStringBinary(this.comparator.getValue()) : "null"); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof DependentColumnFilter))) { + return false; + } + DependentColumnFilter f = (DependentColumnFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getFamily()),Bytes.hashCode(this.getQualifier()), + this.dropDependentColumn()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index f114e98b7d..57279f0223 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -146,4 +146,18 @@ public class FamilyFilter extends CompareFilter { FamilyFilter other = (FamilyFilter)o; return super.areSerializedFieldsEqual(other); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FamilyFilter))) { + return false; + } + FamilyFilter f = (FamilyFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index c549eabb6c..f8be2f6db9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -23,6 +23,7 @@ import 
java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -276,4 +277,17 @@ final public class FilterList extends FilterBase { public String toString() { return this.filterListBase.toString(); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterList))) { + return false; + } + FilterList f = (FilterList) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getOperator(), Arrays.hashCode(this.getFilters().toArray())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java index ebff3a5d73..b9fe00ab79 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java @@ -24,8 +24,10 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * FilterListWithAND represents an ordered list of filters which will be evaluated with an AND @@ -279,4 +281,21 @@ public class FilterListWithAND extends FilterListBase { } return maxHint; } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterListWithAND))) { + return false; + } + if (this == obj) { + return true; + } + FilterListWithAND f = (FilterListWithAND) obj; + return this.filters.equals(f.getFilters()) + &&this.seekHintFilters.equals(f.seekHintFilters); + } + + @Override + public int hashCode() { + return Objects.hash(this.seekHintFilters, Arrays.hashCode(this.filters.toArray())); + } } diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java index 064dd8387b..991563e13c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java @@ -26,8 +26,10 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * FilterListWithOR represents an ordered list of filters which will be evaluated with an OR @@ -391,4 +393,26 @@ public class FilterListWithOR extends FilterListBase { } return minKeyHint; } + + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterListWithOR))) { + return false; + } + if (this == obj) { + return true; + } + FilterListWithOR f = (FilterListWithOR) obj; + return this.filters.equals(f.getFilters()) + &&this.prevFilterRCList.equals(f.prevFilterRCList) + &&this.prevCellList.equals(f.prevCellList); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(this.prevFilterRCList.toArray()), + Arrays.hashCode(this.prevCellList.toArray()), + Arrays.hashCode(this.filters.toArray())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index a6b990f4b7..38f8682bcb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -34,40 +34,36 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce *

* This filter can be used to more efficiently perform row count operations. */ -@InterfaceAudience.Public -public class FirstKeyOnlyFilter extends FilterBase { +@InterfaceAudience.Public public class FirstKeyOnlyFilter extends FilterBase { private boolean foundKV = false; public FirstKeyOnlyFilter() { } - @Override - public void reset() { + @Override public void reset() { foundKV = false; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. return false; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { - if(foundKV) return ReturnCode.NEXT_ROW; + @Override public ReturnCode filterCell(final Cell c) { + if (foundKV) { + return ReturnCode.NEXT_ROW; + } foundKV = true; return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { - Preconditions.checkArgument(filterArguments.isEmpty(), - "Expected 0 but got: %s", filterArguments.size()); + public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { + Preconditions + .checkArgument(filterArguments.isEmpty(), "Expected 0 but got: %s", filterArguments.size()); return new FirstKeyOnlyFilter(); } @@ -79,7 +75,6 @@ public class FirstKeyOnlyFilter extends FilterBase { /** - * * @param value update {@link #foundKV} flag with value. 
*/ protected void setFoundKV(boolean value) { @@ -89,10 +84,8 @@ public class FirstKeyOnlyFilter extends FilterBase { /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { - FilterProtos.FirstKeyOnlyFilter.Builder builder = - FilterProtos.FirstKeyOnlyFilter.newBuilder(); + @Override public byte[] toByteArray() { + FilterProtos.FirstKeyOnlyFilter.Builder builder = FilterProtos.FirstKeyOnlyFilter.newBuilder(); return builder.build().toByteArray(); } @@ -102,8 +95,7 @@ public class FirstKeyOnlyFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static FirstKeyOnlyFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static FirstKeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException { // There is nothing to deserialize. Why do this at all? try { FilterProtos.FirstKeyOnlyFilter.parseFrom(pbBytes); @@ -119,10 +111,11 @@ public class FirstKeyOnlyFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof FirstKeyOnlyFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) + return true; + if (!(o instanceof FirstKeyOnlyFilter)) + return false; return true; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index 88dc36a740..4775643fb3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.filter; +import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -133,4 +134,18 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { FirstKeyValueMatchingQualifiersFilter other = (FirstKeyValueMatchingQualifiersFilter)o; return this.qualifiers.equals(other.qualifiers); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FirstKeyValueMatchingQualifiersFilter))) { + return false; + } + FirstKeyValueMatchingQualifiersFilter f = (FirstKeyValueMatchingQualifiersFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.qualifiers); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 714c550ddd..73743984dc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use 
this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.PriorityQueue; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -57,8 +58,7 @@ import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesti * "\x01\x01\x01\x01\x00\x00\x00\x00\x01\x01\x01\x01\x00\x00\x00" I.e. fuzzy info tells the matching * mask is "????_99_????_01", where at ? can be any value. */ -@InterfaceAudience.Public -public class FuzzyRowFilter extends FilterBase { +@InterfaceAudience.Public public class FuzzyRowFilter extends FilterBase { private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); private List> fuzzyKeysData; private boolean done = false; @@ -80,8 +80,8 @@ public class FuzzyRowFilter extends FilterBase { for (Pair aFuzzyKeysData : fuzzyKeysData) { if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) { - Pair readable = - new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond())); + Pair readable = new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), + Bytes.toStringBinary(aFuzzyKeysData.getSecond())); throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable); } @@ -100,7 +100,6 @@ public class FuzzyRowFilter extends FilterBase { this.tracker = new RowTracker(); } - private void preprocessSearchKey(Pair p) { if (!UNSAFE_UNALIGNED) { // do nothing @@ -127,7 +126,8 @@ public class FuzzyRowFilter extends FilterBase { // do nothing return mask; } - if (isPreprocessedMask(mask)) return mask; + if (isPreprocessedMask(mask)) + return mask; for (int i = 0; i < mask.length; i++) { if (mask[i] == 0) { mask[i] = -1; // 0 -> 
-1 @@ -147,14 +147,11 @@ public class FuzzyRowFilter extends FilterBase { return true; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { final int startIndex = lastFoundIndex >= 0 ? lastFoundIndex : 0; final int size = fuzzyKeysData.size(); for (int i = startIndex; i < size + startIndex; i++) { @@ -166,7 +163,7 @@ public class FuzzyRowFilter extends FilterBase { } SatisfiesCode satisfiesCode = satisfies(isReversed(), c.getRowArray(), c.getRowOffset(), c.getRowLength(), - fuzzyData.getFirst(), fuzzyData.getSecond()); + fuzzyData.getFirst(), fuzzyData.getSecond()); if (satisfiesCode == SatisfiesCode.YES) { lastFoundIndex = index; return ReturnCode.INCLUDE; @@ -179,8 +176,7 @@ public class FuzzyRowFilter extends FilterBase { } - @Override - public Cell getNextCellHint(Cell currentCell) { + @Override public Cell getNextCellHint(Cell currentCell) { boolean result = tracker.updateTracker(currentCell); if (result == false) { done = true; @@ -204,14 +200,14 @@ public class FuzzyRowFilter extends FilterBase { RowTracker() { nextRows = new PriorityQueue<>(fuzzyKeysData.size(), - new Comparator>>() { - @Override - public int compare(Pair> o1, - Pair> o2) { - return isReversed()? Bytes.compareTo(o2.getFirst(), o1.getFirst()): - Bytes.compareTo(o1.getFirst(), o2.getFirst()); - } - }); + new Comparator>>() { + @Override public int compare(Pair> o1, + Pair> o2) { + return isReversed() ? 
+ Bytes.compareTo(o2.getFirst(), o1.getFirst()) : + Bytes.compareTo(o1.getFirst(), o2.getFirst()); + } + }); } byte[] nextRow() { @@ -240,14 +236,15 @@ public class FuzzyRowFilter extends FilterBase { } boolean lessThan(Cell currentCell, byte[] nextRowKey) { - int compareResult = CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length); + int compareResult = + CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length); return (!isReversed() && compareResult < 0) || (isReversed() && compareResult > 0); } void updateWith(Cell currentCell, Pair fuzzyData) { byte[] nextRowKeyCandidate = getNextForFuzzyRule(isReversed(), currentCell.getRowArray(), currentCell.getRowOffset(), - currentCell.getRowLength(), fuzzyData.getFirst(), fuzzyData.getSecond()); + currentCell.getRowLength(), fuzzyData.getFirst(), fuzzyData.getSecond()); if (nextRowKeyCandidate != null) { nextRows.add(new Pair<>(nextRowKeyCandidate, fuzzyData)); } @@ -255,16 +252,14 @@ public class FuzzyRowFilter extends FilterBase { } - @Override - public boolean filterAllRemaining() { + @Override public boolean filterAllRemaining() { return done; } /** * @return The filter serialized using pb */ - @Override - public byte[] toByteArray() { + @Override public byte[] toByteArray() { FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder(); for (Pair fuzzyData : fuzzyKeysData) { BytesBytesPair.Builder bbpBuilder = BytesBytesPair.newBuilder(); @@ -299,8 +294,7 @@ public class FuzzyRowFilter extends FilterBase { return new FuzzyRowFilter(fuzzyKeysData); } - @Override - public String toString() { + @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("FuzzyRowFilter"); sb.append("{fuzzyKeysData="); @@ -314,12 +308,11 @@ public class FuzzyRowFilter extends FilterBase { // Utility methods + static enum SatisfiesCode { /** row satisfies fuzzy rule */ - YES, - /** row doesn't satisfy fuzzy rule, 
but there's possible greater row that does */ - NEXT_EXISTS, - /** row doesn't satisfy fuzzy rule and there's no greater row that does */ + YES, /** row doesn't satisfy fuzzy rule, but there's possible greater row that does */ + NEXT_EXISTS, /** row doesn't satisfy fuzzy rule and there's no greater row that does */ NO_NEXT } @@ -458,56 +451,45 @@ public class FuzzyRowFilter extends FilterBase { /** Abstracts directional comparisons based on scan direction. */ private enum Order { ASC { - @Override - public boolean lt(int lhs, int rhs) { + @Override public boolean lt(int lhs, int rhs) { return lhs < rhs; } - @Override - public boolean gt(int lhs, int rhs) { + @Override public boolean gt(int lhs, int rhs) { return lhs > rhs; } - @Override - public byte inc(byte val) { + @Override public byte inc(byte val) { // TODO: what about over/underflow? return (byte) (val + 1); } - @Override - public boolean isMax(byte val) { + @Override public boolean isMax(byte val) { return val == (byte) 0xff; } - @Override - public byte min() { + @Override public byte min() { return 0; } - }, - DESC { - @Override - public boolean lt(int lhs, int rhs) { + }, DESC { + @Override public boolean lt(int lhs, int rhs) { return lhs > rhs; } - @Override - public boolean gt(int lhs, int rhs) { + @Override public boolean gt(int lhs, int rhs) { return lhs < rhs; } - @Override - public byte inc(byte val) { + @Override public byte inc(byte val) { // TODO: what about over/underflow? 
return (byte) (val - 1); } - @Override - public boolean isMax(byte val) { + @Override public boolean isMax(byte val) { return val == 0; } - @Override - public byte min() { + @Override public byte min() { return (byte) 0xFF; } }; @@ -536,9 +518,8 @@ public class FuzzyRowFilter extends FilterBase { * @return greater byte array than given (row) which satisfies the fuzzy rule if it exists, null * otherwise */ - @VisibleForTesting - static byte[] getNextForFuzzyRule(boolean reverse, byte[] row, int offset, int length, - byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) { + @VisibleForTesting static byte[] getNextForFuzzyRule(boolean reverse, byte[] row, int offset, + int length, byte[] fuzzyKeyBytes, byte[] fuzzyKeyMeta) { // To find out the next "smallest" byte array that satisfies fuzzy rule and "greater" than // the given one we do the following: // 1. setting values on all "fixed" positions to the values from fuzzyKeyBytes @@ -598,7 +579,7 @@ public class FuzzyRowFilter extends FilterBase { } } - return reverse? result: trimTrailingZeroes(result, fuzzyKeyMeta, toInc); + return reverse ? result : trimTrailingZeroes(result, fuzzyKeyMeta, toInc); } /** @@ -606,21 +587,24 @@ public class FuzzyRowFilter extends FilterBase { * unless they are part of fuzzyKeyMeta * hint = '\x01\x01\x01\x00\x00' * will skip valid row '\x01\x01\x01' - * + * * @param result * @param fuzzyKeyMeta * @param toInc - position of incremented byte * @return trimmed version of result */ - + private static byte[] trimTrailingZeroes(byte[] result, byte[] fuzzyKeyMeta, int toInc) { - int off = fuzzyKeyMeta.length >= result.length? result.length -1: - fuzzyKeyMeta.length -1; - for( ; off >= 0; off--){ - if(fuzzyKeyMeta[off] != 0) break; + int off = fuzzyKeyMeta.length >= result.length ? 
result.length - 1 : fuzzyKeyMeta.length - 1; + for (; off >= 0; off--) { + if (fuzzyKeyMeta[off] != 0) { + break; + } + } + if (off < toInc) { + off = toInc; } - if (off < toInc) off = toInc; - byte[] retValue = new byte[off+1]; + byte[] retValue = new byte[off + 1]; System.arraycopy(result, 0, retValue, 0, retValue.length); return retValue; } @@ -629,21 +613,37 @@ public class FuzzyRowFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized are equal to the * corresponding fields in other. Used for testing. */ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof FuzzyRowFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof FuzzyRowFilter)) { + return false; + } FuzzyRowFilter other = (FuzzyRowFilter) o; - if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) return false; + if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) + return false; for (int i = 0; i < fuzzyKeysData.size(); ++i) { Pair thisData = this.fuzzyKeysData.get(i); Pair otherData = other.fuzzyKeysData.get(i); - if (!(Bytes.equals(thisData.getFirst(), otherData.getFirst()) && Bytes.equals( - thisData.getSecond(), otherData.getSecond()))) { + if (!(Bytes.equals(thisData.getFirst(), otherData.getFirst()) && Bytes + .equals(thisData.getSecond(), otherData.getSecond()))) { return false; } } return true; } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FuzzyRowFilter))) { + return false; + } + FuzzyRowFilter f = (FuzzyRowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.fuzzyKeysData.toArray()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 
37564994de..0e1749a3ea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -131,4 +132,18 @@ public class InclusiveStopFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.stopRowKey); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof InclusiveStopFilter))) { + return false; + } + InclusiveStopFilter f = (InclusiveStopFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.stopRowKey)); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index a66441bceb..40509ec3d5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -23,7 +23,9 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; +import java.util.Objects; import java.util.Optional; + import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; @@ -44,21 +46,28 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; * This filter can be used to grab all of the keys without having to also grab * the values. 
*/ -@InterfaceAudience.Public -public class KeyOnlyFilter extends FilterBase { +@InterfaceAudience.Public public class KeyOnlyFilter extends FilterBase { boolean lenAsVal; - public KeyOnlyFilter() { this(false); } - public KeyOnlyFilter(boolean lenAsVal) { this.lenAsVal = lenAsVal; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + public KeyOnlyFilter() { + this(false); + } + + public KeyOnlyFilter(boolean lenAsVal) { + this.lenAsVal = lenAsVal; + } + + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. return false; } - @Override - public Cell transformCell(Cell cell) { + public boolean isLenAsVal() { + return this.lenAsVal; + } + + @Override public Cell transformCell(Cell cell) { return createKeyOnlyCell(cell); } @@ -70,20 +79,17 @@ public class KeyOnlyFilter extends FilterBase { } } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell ignored) throws IOException { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell ignored) throws IOException { return filterCell(ignored); } - @Override - public ReturnCode filterCell(final Cell ignored) throws IOException { + @Override public ReturnCode filterCell(final Cell ignored) throws IOException { return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { Preconditions.checkArgument((filterArguments.isEmpty() || filterArguments.size() == 1), - "Expected: 0 or 1 but got: %s", filterArguments.size()); + "Expected: 0 or 1 but got: %s", filterArguments.size()); KeyOnlyFilter filter = new KeyOnlyFilter(); if (filterArguments.size() == 1) { filter.lenAsVal = ParseFilter.convertByteArrayToBoolean(filterArguments.get(0)); @@ -94,10 +100,8 @@ public class KeyOnlyFilter extends FilterBase { /** * @return The filter serialized using pb */ - @Override - 
public byte [] toByteArray() { - FilterProtos.KeyOnlyFilter.Builder builder = - FilterProtos.KeyOnlyFilter.newBuilder(); + @Override public byte[] toByteArray() { + FilterProtos.KeyOnlyFilter.Builder builder = FilterProtos.KeyOnlyFilter.newBuilder(); builder.setLenAsVal(this.lenAsVal); return builder.build().toByteArray(); } @@ -108,8 +112,7 @@ public class KeyOnlyFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static KeyOnlyFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static KeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.KeyOnlyFilter proto; try { proto = FilterProtos.KeyOnlyFilter.parseFrom(pbBytes); @@ -124,12 +127,15 @@ public class KeyOnlyFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof KeyOnlyFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof KeyOnlyFilter)) { + return false; + } - KeyOnlyFilter other = (KeyOnlyFilter)o; + KeyOnlyFilter other = (KeyOnlyFilter) o; return this.lenAsVal == other.lenAsVal; } @@ -142,74 +148,59 @@ public class KeyOnlyFilter extends FilterBase { this.lenAsVal = lenAsVal; } - @Override - public byte[] getRowArray() { + @Override public byte[] getRowArray() { return cell.getRowArray(); } - @Override - public int getRowOffset() { + @Override public int getRowOffset() { return cell.getRowOffset(); } - @Override - public short getRowLength() { + @Override public short getRowLength() { return cell.getRowLength(); } - @Override - public byte[] getFamilyArray() { + @Override public byte[] getFamilyArray() { return cell.getFamilyArray(); } - @Override - public int getFamilyOffset() { + 
@Override public int getFamilyOffset() { return cell.getFamilyOffset(); } - @Override - public byte getFamilyLength() { + @Override public byte getFamilyLength() { return cell.getFamilyLength(); } - @Override - public byte[] getQualifierArray() { + @Override public byte[] getQualifierArray() { return cell.getQualifierArray(); } - @Override - public int getQualifierOffset() { + @Override public int getQualifierOffset() { return cell.getQualifierOffset(); } - @Override - public int getQualifierLength() { + @Override public int getQualifierLength() { return cell.getQualifierLength(); } - @Override - public long getTimestamp() { + @Override public long getTimestamp() { return cell.getTimestamp(); } - @Override - public byte getTypeByte() { + @Override public byte getTypeByte() { return cell.getTypeByte(); } - @Override - public Type getType() { + @Override public Type getType() { return cell.getType(); } - - @Override - public long getSequenceId() { + @Override public long getSequenceId() { return 0; } - @Override - public byte[] getValueArray() { + @Override public byte[] getValueArray() { if (lenAsVal) { return Bytes.toBytes(cell.getValueLength()); } else { @@ -217,13 +208,11 @@ public class KeyOnlyFilter extends FilterBase { } } - @Override - public int getValueOffset() { + @Override public int getValueOffset() { return 0; } - @Override - public int getValueLength() { + @Override public int getValueLength() { if (lenAsVal) { return Bytes.SIZEOF_INT; } else { @@ -231,25 +220,34 @@ public class KeyOnlyFilter extends FilterBase { } } - @Override - public byte[] getTagsArray() { + @Override public byte[] getTagsArray() { return HConstants.EMPTY_BYTE_ARRAY; } - @Override - public int getTagsOffset() { + @Override public int getTagsOffset() { return 0; } - @Override - public int getTagsLength() { + @Override public int getTagsLength() { return 0; } } + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof KeyOnlyFilter))) { + return false; + 
} + KeyOnlyFilter f = (KeyOnlyFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.lenAsVal); + } + static class KeyOnlyByteBufferExtendedCell extends ByteBufferExtendedCell { - public static final int FIXED_OVERHEAD = ClassSize.OBJECT + ClassSize.REFERENCE - + Bytes.SIZEOF_BOOLEAN; + public static final int FIXED_OVERHEAD = + ClassSize.OBJECT + ClassSize.REFERENCE + Bytes.SIZEOF_BOOLEAN; private ByteBufferExtendedCell cell; private boolean lenAsVal; @@ -258,88 +256,71 @@ public class KeyOnlyFilter extends FilterBase { this.lenAsVal = lenAsVal; } - @Override - public byte[] getRowArray() { + @Override public byte[] getRowArray() { return cell.getRowArray(); } - @Override - public int getRowOffset() { + @Override public int getRowOffset() { return cell.getRowOffset(); } - @Override - public short getRowLength() { + @Override public short getRowLength() { return cell.getRowLength(); } - @Override - public byte[] getFamilyArray() { + @Override public byte[] getFamilyArray() { return cell.getFamilyArray(); } - @Override - public int getFamilyOffset() { + @Override public int getFamilyOffset() { return cell.getFamilyOffset(); } - @Override - public byte getFamilyLength() { + @Override public byte getFamilyLength() { return cell.getFamilyLength(); } - @Override - public byte[] getQualifierArray() { + @Override public byte[] getQualifierArray() { return cell.getQualifierArray(); } - @Override - public int getQualifierOffset() { + @Override public int getQualifierOffset() { return cell.getQualifierOffset(); } - @Override - public int getQualifierLength() { + @Override public int getQualifierLength() { return cell.getQualifierLength(); } - @Override - public long getTimestamp() { + @Override public long getTimestamp() { return cell.getTimestamp(); } - @Override - public byte getTypeByte() { + @Override public byte getTypeByte() { return cell.getTypeByte(); } - @Override - public void setSequenceId(long 
seqId) throws IOException { + @Override public void setSequenceId(long seqId) throws IOException { cell.setSequenceId(seqId); } - @Override - public void setTimestamp(long ts) throws IOException { + @Override public void setTimestamp(long ts) throws IOException { cell.setTimestamp(ts); } - @Override - public void setTimestamp(byte[] ts) throws IOException { + @Override public void setTimestamp(byte[] ts) throws IOException { cell.setTimestamp(ts); } - @Override - public long getSequenceId() { + @Override public long getSequenceId() { return 0; } - @Override - public Type getType() { + @Override public Type getType() { return cell.getType(); } - @Override - public byte[] getValueArray() { + @Override public byte[] getValueArray() { if (lenAsVal) { return Bytes.toBytes(cell.getValueLength()); } else { @@ -347,13 +328,11 @@ public class KeyOnlyFilter extends FilterBase { } } - @Override - public int getValueOffset() { + @Override public int getValueOffset() { return 0; } - @Override - public int getValueLength() { + @Override public int getValueLength() { if (lenAsVal) { return Bytes.SIZEOF_INT; } else { @@ -361,53 +340,43 @@ public class KeyOnlyFilter extends FilterBase { } } - @Override - public byte[] getTagsArray() { + @Override public byte[] getTagsArray() { return HConstants.EMPTY_BYTE_ARRAY; } - @Override - public int getTagsOffset() { + @Override public int getTagsOffset() { return 0; } - @Override - public int getTagsLength() { + @Override public int getTagsLength() { return 0; } - @Override - public ByteBuffer getRowByteBuffer() { + @Override public ByteBuffer getRowByteBuffer() { return cell.getRowByteBuffer(); } - @Override - public int getRowPosition() { + @Override public int getRowPosition() { return cell.getRowPosition(); } - @Override - public ByteBuffer getFamilyByteBuffer() { + @Override public ByteBuffer getFamilyByteBuffer() { return cell.getFamilyByteBuffer(); } - @Override - public int getFamilyPosition() { + @Override public int 
getFamilyPosition() { return cell.getFamilyPosition(); } - @Override - public ByteBuffer getQualifierByteBuffer() { + @Override public ByteBuffer getQualifierByteBuffer() { return cell.getQualifierByteBuffer(); } - @Override - public int getQualifierPosition() { + @Override public int getQualifierPosition() { return cell.getQualifierPosition(); } - @Override - public ByteBuffer getValueByteBuffer() { + @Override public ByteBuffer getValueByteBuffer() { if (lenAsVal) { return ByteBuffer.wrap(Bytes.toBytes(cell.getValueLength())); } else { @@ -415,33 +384,27 @@ public class KeyOnlyFilter extends FilterBase { } } - @Override - public int getValuePosition() { + @Override public int getValuePosition() { return 0; } - @Override - public ByteBuffer getTagsByteBuffer() { + @Override public ByteBuffer getTagsByteBuffer() { return HConstants.EMPTY_BYTE_BUFFER; } - @Override - public int getTagsPosition() { + @Override public int getTagsPosition() { return 0; } - @Override - public Iterator getTags() { + @Override public Iterator getTags() { return Collections.emptyIterator(); } - @Override - public Optional getTag(byte type) { + @Override public Optional getTag(byte type) { return Optional.empty(); } - @Override - public long heapSize() { + @Override public long heapSize() { return ClassSize.align(FIXED_OVERHEAD + cell.heapSize()); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index dfd6297771..f54183b16b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -512,5 
+513,41 @@ public class MultiRowRangeFilter extends FilterBase { || Bytes.compareTo(startRow, stopRow) < 0 || (Bytes.compareTo(startRow, stopRow) == 0 && stopRowInclusive == true); } + @Override + public boolean equals(Object obj){ + if (obj == null || (!(obj instanceof RowRange))) { + return false; + } + if (this == obj) { + return true; + } + RowRange rr = (RowRange) obj; + return Bytes.equals(this.stopRow, rr.getStopRow()) + &&Bytes.equals(this.startRow,this.getStartRow()) + &&this.startRowInclusive==rr.isStartRowInclusive() + &&this.stopRowInclusive==rr.isStopRowInclusive(); + } + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.stopRow), + Bytes.hashCode(this.startRow), + this.startRowInclusive, + this.stopRowInclusive); + } + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof MultiRowRangeFilter))) { + return false; + } + MultiRowRangeFilter f = (MultiRowRangeFilter) obj; + return this.areSerializedFieldsEqual(f); } + + @Override + public int hashCode() { + return Objects.hash(this.rangeList.toArray()); + } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 47feea7f36..2eec39ec2a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.TreeSet; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -37,44 +38,39 @@ import org.apache.hadoop.hbase.util.Bytes; * a particular prefix. For example, if prefix is 'an', it will pass keys will * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'. 
*/ -@InterfaceAudience.Public -public class MultipleColumnPrefixFilter extends FilterBase { - protected byte [] hint = null; - protected TreeSet sortedPrefixes = createTreeSet(); +@InterfaceAudience.Public public class MultipleColumnPrefixFilter extends FilterBase { + protected byte[] hint = null; + protected TreeSet sortedPrefixes = createTreeSet(); private final static int MAX_LOG_PREFIXES = 5; - public MultipleColumnPrefixFilter(final byte [][] prefixes) { + public MultipleColumnPrefixFilter(final byte[][] prefixes) { if (prefixes != null) { for (int i = 0; i < prefixes.length; i++) { if (!sortedPrefixes.add(prefixes[i])) - throw new IllegalArgumentException ("prefixes must be distinct"); + throw new IllegalArgumentException("prefixes must be distinct"); } } } - public byte [][] getPrefix() { + public byte[][] getPrefix() { int count = 0; - byte [][] temp = new byte [sortedPrefixes.size()][]; - for (byte [] prefixes : sortedPrefixes) { - temp [count++] = prefixes; + byte[][] temp = new byte[sortedPrefixes.size()][]; + for (byte[] prefixes : sortedPrefixes) { + temp[count++] = prefixes; } return temp; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. 
return false; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { if (sortedPrefixes.isEmpty()) { return ReturnCode.INCLUDE; } else { @@ -83,17 +79,17 @@ public class MultipleColumnPrefixFilter extends FilterBase { } public ReturnCode filterColumn(Cell cell) { - byte [] qualifier = CellUtil.cloneQualifier(cell); - TreeSet lesserOrEqualPrefixes = - (TreeSet) sortedPrefixes.headSet(qualifier, true); + byte[] qualifier = CellUtil.cloneQualifier(cell); + TreeSet lesserOrEqualPrefixes = + (TreeSet) sortedPrefixes.headSet(qualifier, true); if (lesserOrEqualPrefixes.size() != 0) { - byte [] largestPrefixSmallerThanQualifier = lesserOrEqualPrefixes.last(); - + byte[] largestPrefixSmallerThanQualifier = lesserOrEqualPrefixes.last(); + if (Bytes.startsWith(qualifier, largestPrefixSmallerThanQualifier)) { return ReturnCode.INCLUDE; } - + if (lesserOrEqualPrefixes.size() == sortedPrefixes.size()) { return ReturnCode.NEXT_ROW; } else { @@ -106,10 +102,10 @@ public class MultipleColumnPrefixFilter extends FilterBase { } } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - byte [][] prefixes = new byte [filterArguments.size()][]; - for (int i = 0 ; i < filterArguments.size(); i++) { - byte [] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(i)); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + byte[][] prefixes = new byte[filterArguments.size()][]; + for (int i = 0; i < filterArguments.size(); i++) { + byte[] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(i)); prefixes[i] = columnPrefix; } return new MultipleColumnPrefixFilter(prefixes); @@ -118,12 +114,13 @@ public class MultipleColumnPrefixFilter extends FilterBase { /** * @return The 
filter serialized using pb */ - @Override - public byte [] toByteArray() { + @Override public byte[] toByteArray() { FilterProtos.MultipleColumnPrefixFilter.Builder builder = - FilterProtos.MultipleColumnPrefixFilter.newBuilder(); - for (byte [] element : sortedPrefixes) { - if (element != null) builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element)); + FilterProtos.MultipleColumnPrefixFilter.newBuilder(); + for (byte[] element : sortedPrefixes) { + if (element != null) { + builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element)); + } } return builder.build().toByteArray(); } @@ -134,8 +131,8 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static MultipleColumnPrefixFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static MultipleColumnPrefixFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.MultipleColumnPrefixFilter proto; try { proto = FilterProtos.MultipleColumnPrefixFilter.parseFrom(pbBytes); @@ -143,7 +140,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { throw new DeserializationException(e); } int numPrefixes = proto.getSortedPrefixesCount(); - byte [][] prefixes = new byte[numPrefixes][]; + byte[][] prefixes = new byte[numPrefixes][]; for (int i = 0; i < numPrefixes; ++i) { prefixes[i] = proto.getSortedPrefixes(i).toByteArray(); } @@ -156,36 +153,37 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof MultipleColumnPrefixFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof MultipleColumnPrefixFilter)) { + return false; + } - MultipleColumnPrefixFilter other = (MultipleColumnPrefixFilter)o; + MultipleColumnPrefixFilter other = (MultipleColumnPrefixFilter) o; return this.sortedPrefixes.equals(other.sortedPrefixes); } - @Override - public Cell getNextCellHint(Cell cell) { + @Override public Cell getNextCellHint(Cell cell) { return PrivateCellUtil.createFirstOnRowCol(cell, hint, 0, hint.length); } - public TreeSet createTreeSet() { + public TreeSet createTreeSet() { return new TreeSet<>(new Comparator() { - @Override - public int compare (Object o1, Object o2) { - if (o1 == null || o2 == null) - throw new IllegalArgumentException ("prefixes can't be null"); - - byte [] b1 = (byte []) o1; - byte [] b2 = (byte []) o2; - return Bytes.compareTo (b1, 0, b1.length, b2, 0, b2.length); + @Override public int compare(Object o1, Object o2) { + if (o1 == null || o2 == null) { + throw new IllegalArgumentException("prefixes can't be null"); } - }); + + byte[] b1 = (byte[]) o1; + byte[] b2 = (byte[]) o2; + return Bytes.compareTo(b1, 0, b1.length, b2, 0, b2.length); + } + }); } - @Override - public String toString() { + @Override public String toString() { return toString(MAX_LOG_PREFIXES); } @@ -204,7 +202,20 @@ public class MultipleColumnPrefixFilter extends FilterBase { } } - return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(), - count, this.sortedPrefixes.size(), prefixes.toString()); + return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(), count, + this.sortedPrefixes.size(), prefixes.toString()); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof MultipleColumnPrefixFilter))) { + return false; + } + 
MultipleColumnPrefixFilter f = (MultipleColumnPrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.sortedPrefixes); + } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index 30ddf24a78..06f777d629 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -150,4 +151,18 @@ public class PageFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.pageSize; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof PageFilter))) { + return false; + } + PageFilter f = (PageFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.pageSize); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index df9177bf8a..e243933a5b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; @@ -156,4 +157,18 @@ public class PrefixFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + 
Bytes.toStringBinary(this.prefix); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof PrefixFilter))) { + return false; + } + PrefixFilter f = (PrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getPrefix())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index b38c0108df..ebf6eb0a34 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -55,8 +55,7 @@ public class QualifierFilter extends CompareFilter { * Use {@link #QualifierFilter(CompareOperator, ByteArrayComparable)} instead. 
*/ @Deprecated - public QualifierFilter(final CompareOp op, - final ByteArrayComparable qualifierComparator) { + public QualifierFilter(final CompareOp op, final ByteArrayComparable qualifierComparator) { super(op, qualifierComparator); } @@ -65,8 +64,7 @@ public class QualifierFilter extends CompareFilter { * @param op the compare op for column qualifier matching * @param qualifierComparator the comparator for column qualifier matching */ - public QualifierFilter(final CompareOperator op, - final ByteArrayComparable qualifierComparator) { + public QualifierFilter(final CompareOperator op, final ByteArrayComparable qualifierComparator) { super(op, qualifierComparator); } @@ -87,10 +85,10 @@ public class QualifierFilter extends CompareFilter { return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { ArrayList arguments = CompareFilter.extractArguments(filterArguments); - CompareOperator compareOp = (CompareOperator)arguments.get(0); - ByteArrayComparable comparator = (ByteArrayComparable)arguments.get(1); + CompareOperator compareOp = (CompareOperator) arguments.get(0); + ByteArrayComparable comparator = (ByteArrayComparable) arguments.get(1); return new QualifierFilter(compareOp, comparator); } @@ -98,9 +96,8 @@ public class QualifierFilter extends CompareFilter { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.QualifierFilter.Builder builder = - FilterProtos.QualifierFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.QualifierFilter.Builder builder = FilterProtos.QualifierFilter.newBuilder(); builder.setCompareFilter(super.convert()); return builder.build().toByteArray(); } @@ -111,16 +108,14 @@ public class QualifierFilter extends CompareFilter { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static 
QualifierFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static QualifierFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.QualifierFilter proto; try { proto = FilterProtos.QualifierFilter.parseFrom(pbBytes); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } - final CompareOperator valueCompareOp = - CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name()); + final CompareOperator valueCompareOp = CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name()); ByteArrayComparable valueComparator = null; try { if (proto.getCompareFilter().hasComparator()) { @@ -129,7 +124,7 @@ public class QualifierFilter extends CompareFilter { } catch (IOException ioe) { throw new DeserializationException(ioe); } - return new QualifierFilter(valueCompareOp,valueComparator); + return new QualifierFilter(valueCompareOp, valueComparator); } /** @@ -138,9 +133,27 @@ public class QualifierFilter extends CompareFilter { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof QualifierFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof QualifierFilter)) { + return false; + } return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof QualifierFilter))) { + return false; + } + QualifierFilter f = (QualifierFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index 58624d27ff..369ddd7df4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.util.Objects; import java.util.Random; import org.apache.hadoop.hbase.Cell; @@ -117,9 +118,8 @@ public class RandomRowFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.RandomRowFilter.Builder builder = - FilterProtos.RandomRowFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.RandomRowFilter.Builder builder = FilterProtos.RandomRowFilter.newBuilder(); builder.setChance(this.chance); return builder.build().toByteArray(); } @@ -130,8 +130,7 @@ public class RandomRowFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static RandomRowFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static RandomRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.RandomRowFilter proto; try { proto = FilterProtos.RandomRowFilter.parseFrom(pbBytes); @@ -148,10 +147,28 @@ public class RandomRowFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof RandomRowFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof RandomRowFilter)) { + return false; + } - RandomRowFilter other = (RandomRowFilter)o; + RandomRowFilter other = (RandomRowFilter) o; return this.getChance() == other.getChance(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof RandomRowFilter))) { + return false; + } + RandomRowFilter f = (RandomRowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getChance()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 8eba3ba2fb..435caab38b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -160,4 +160,18 @@ public class RowFilter extends CompareFilter { return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof RowFilter))) { + return false; + } + RowFilter f = (RowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 566c408d01..ccee147a54 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -1,5 +1,4 @@ /** - * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -7,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; /** @@ -40,8 +41,8 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce * {@link SingleColumnValueFilter}, if the tested column value is not actually * needed as input (besides for the filtering itself). */ -@InterfaceAudience.Public -public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { +@InterfaceAudience.Public public class SingleColumnValueExcludeFilter + extends SingleColumnValueFilter { /** * Constructor for binary compare of the value of a single column. 
If the @@ -55,8 +56,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @param value value to compare column values against * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, byte[])} */ - @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, + @Deprecated public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOp compareOp, byte[] value) { super(family, qualifier, compareOp, value); } @@ -72,8 +72,8 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @param op operator * @param value value to compare column values against */ - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOperator op, byte[] value) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOperator op, + byte[] value) { super(family, qualifier, op, value); } @@ -94,8 +94,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} */ - @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, + @Deprecated public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator) { super(family, qualifier, compareOp, comparator); } @@ -115,12 +114,11 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @param op operator * @param comparator Comparator to use. */ - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOperator op, ByteArrayComparable comparator) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOperator op, + ByteArrayComparable comparator) { super(family, qualifier, op, comparator); } - /** * Constructor for protobuf deserialization only. 
* @param family @@ -132,12 +130,11 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable, boolean, boolean)} */ - @Deprecated - protected SingleColumnValueExcludeFilter(final byte[] family, final byte[] qualifier, + @Deprecated protected SingleColumnValueExcludeFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, ByteArrayComparable comparator, final boolean filterIfMissing, final boolean latestVersionOnly) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, - filterIfMissing, latestVersionOnly); + this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, filterIfMissing, + latestVersionOnly); } /** @@ -156,14 +153,12 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } // We cleaned result row in FilterRow to be consistent with scanning process. 
- @Override - public boolean hasFilterRow() { - return true; + @Override public boolean hasFilterRow() { + return true; } // Here we remove from row all key values from testing column - @Override - public void filterRowCells(List kvs) { + @Override public void filterRowCells(List kvs) { Iterator it = kvs.iterator(); while (it.hasNext()) { // If the current column is actually the tested column, @@ -174,12 +169,12 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - SingleColumnValueFilter tempFilter = (SingleColumnValueFilter) - SingleColumnValueFilter.createFilterFromArguments(filterArguments); - SingleColumnValueExcludeFilter filter = new SingleColumnValueExcludeFilter ( - tempFilter.getFamily(), tempFilter.getQualifier(), - tempFilter.getOperator(), tempFilter.getComparator()); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + SingleColumnValueFilter tempFilter = (SingleColumnValueFilter) SingleColumnValueFilter + .createFilterFromArguments(filterArguments); + SingleColumnValueExcludeFilter filter = + new SingleColumnValueExcludeFilter(tempFilter.getFamily(), tempFilter.getQualifier(), + tempFilter.getOperator(), tempFilter.getComparator()); if (filterArguments.size() == 6) { filter.setFilterIfMissing(tempFilter.getFilterIfMissing()); @@ -191,10 +186,9 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { + @Override public byte[] toByteArray() { FilterProtos.SingleColumnValueExcludeFilter.Builder builder = - FilterProtos.SingleColumnValueExcludeFilter.newBuilder(); + FilterProtos.SingleColumnValueExcludeFilter.newBuilder(); builder.setSingleColumnValueFilter(super.convert()); return builder.build().toByteArray(); } @@ -205,8 +199,8 @@ public class SingleColumnValueExcludeFilter extends 
SingleColumnValueFilter { * @throws DeserializationException * @see #toByteArray */ - public static SingleColumnValueExcludeFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SingleColumnValueExcludeFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.SingleColumnValueExcludeFilter proto; try { proto = FilterProtos.SingleColumnValueExcludeFilter.parseFrom(pbBytes); @@ -215,8 +209,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } FilterProtos.SingleColumnValueFilter parentProto = proto.getSingleColumnValueFilter(); - final CompareOperator compareOp = - CompareOperator.valueOf(parentProto.getCompareOp().name()); + final CompareOperator compareOp = CompareOperator.valueOf(parentProto.getCompareOp().name()); final ByteArrayComparable comparator; try { comparator = ProtobufUtil.toComparator(parentProto.getComparator()); @@ -224,21 +217,43 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { throw new DeserializationException(ioe); } - return new SingleColumnValueExcludeFilter(parentProto.hasColumnFamily() ? parentProto - .getColumnFamily().toByteArray() : null, parentProto.hasColumnQualifier() ? parentProto - .getColumnQualifier().toByteArray() : null, compareOp, comparator, parentProto - .getFilterIfMissing(), parentProto.getLatestVersionOnly()); + return new SingleColumnValueExcludeFilter( + parentProto.hasColumnFamily() ? parentProto.getColumnFamily().toByteArray() : null, + parentProto.hasColumnQualifier() ? parentProto.getColumnQualifier().toByteArray() : null, + compareOp, comparator, parentProto.getFilterIfMissing(), + parentProto.getLatestVersionOnly()); } /** * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SingleColumnValueExcludeFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof SingleColumnValueExcludeFilter)) { + return false; + } return super.areSerializedFieldsEqual(o); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(SingleColumnValueExcludeFilter.class.isInstance(obj)))) { + return false; + } + SingleColumnValueExcludeFilter other = (SingleColumnValueExcludeFilter) obj; + return Bytes.equals(this.getFamily(), other.getFamily()) && Bytes + .equals(this.getQualifier(), other.getQualifier()) && this.op.equals(other.op) && this + .getComparator().areSerializedFieldsEqual(other.getComparator()) + && this.getFilterIfMissing() == other.getFilterIfMissing() + && this.getLatestVersionOnly() == other.getLatestVersionOnly(); + } + + @Override public int hashCode() { + return Objects.hash(this.getClass().getName(), Bytes.hashCode(this.getFamily()), + Bytes.hashCode(this.getQualifier()), this.op, this.getComparator(), + this.getFilterIfMissing(), this.getLatestVersionOnly()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index e5c83b1d72..49c094ec47 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -1,5 +1,4 @@ /** - * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -7,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,8 +18,9 @@ package org.apache.hadoop.hbase.filter; -import java.io.IOException; import java.util.ArrayList; +import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -67,11 +67,10 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; *

* To filter based on the value of all scanned columns, use {@link ValueFilter}. */ -@InterfaceAudience.Public -public class SingleColumnValueFilter extends FilterBase { +@InterfaceAudience.Public public class SingleColumnValueFilter extends FilterBase { - protected byte [] columnFamily; - protected byte [] columnQualifier; + protected byte[] columnFamily; + protected byte[] columnQualifier; protected CompareOperator op; protected org.apache.hadoop.hbase.filter.ByteArrayComparable comparator; protected boolean foundColumn = false; @@ -95,11 +94,10 @@ public class SingleColumnValueFilter extends FilterBase { * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, byte[])} instead. */ - @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + @Deprecated public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, final byte[] value) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), - new org.apache.hadoop.hbase.filter.BinaryComparator(value)); + new org.apache.hadoop.hbase.filter.BinaryComparator(value)); } /** @@ -116,10 +114,9 @@ public class SingleColumnValueFilter extends FilterBase { * @param op operator * @param value value to compare column values against */ - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, - final CompareOperator op, final byte[] value) { - this(family, qualifier, op, - new org.apache.hadoop.hbase.filter.BinaryComparator(value)); + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, + final CompareOperator op, final byte[] value) { + this(family, qualifier, op, new org.apache.hadoop.hbase.filter.BinaryComparator(value)); } /** @@ -138,8 +135,7 @@ public class SingleColumnValueFilter extends FilterBase { * @deprecated Since 2.0.0. Will be removed in 3.0.0. 
Use * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} instead. */ - @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + @Deprecated public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator); @@ -159,7 +155,7 @@ public class SingleColumnValueFilter extends FilterBase { * @param op operator * @param comparator Comparator to use. */ - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOperator op, final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator) { this.columnFamily = family; @@ -177,16 +173,13 @@ public class SingleColumnValueFilter extends FilterBase { * @param filterIfMissing * @param latestVersionOnly * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable, - * boolean, boolean)} instead. + * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable, * boolean, boolean)} instead. 
*/ - @Deprecated - protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, + @Deprecated protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, org.apache.hadoop.hbase.filter.ByteArrayComparable comparator, - final boolean filterIfMissing, - final boolean latestVersionOnly) { + final boolean filterIfMissing, final boolean latestVersionOnly) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, filterIfMissing, - latestVersionOnly); + latestVersionOnly); } /** @@ -200,7 +193,7 @@ public class SingleColumnValueFilter extends FilterBase { */ protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOperator op, org.apache.hadoop.hbase.filter.ByteArrayComparable comparator, - final boolean filterIfMissing, final boolean latestVersionOnly) { + final boolean filterIfMissing, final boolean latestVersionOnly) { this(family, qualifier, op, comparator); this.filterIfMissing = filterIfMissing; this.latestVersionOnly = latestVersionOnly; @@ -208,10 +201,9 @@ public class SingleColumnValueFilter extends FilterBase { /** * @return operator - * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. + * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. */ - @Deprecated - public CompareOp getOperator() { + @Deprecated public CompareOp getOperator() { return CompareOp.valueOf(op.name()); } @@ -240,20 +232,16 @@ public class SingleColumnValueFilter extends FilterBase { return columnQualifier; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. 
return false; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue())); if (this.matchedColumn) { // We already found and matched the single column, all keys now pass @@ -267,7 +255,7 @@ public class SingleColumnValueFilter extends FilterBase { } foundColumn = true; if (filterColumnValue(c)) { - return this.latestVersionOnly? ReturnCode.NEXT_ROW: ReturnCode.INCLUDE; + return this.latestVersionOnly ? ReturnCode.NEXT_ROW : ReturnCode.INCLUDE; } this.matchedColumn = true; return ReturnCode.INCLUDE; @@ -278,20 +266,17 @@ public class SingleColumnValueFilter extends FilterBase { return CompareFilter.compare(this.op, compareResult); } - @Override - public boolean filterRow() { + @Override public boolean filterRow() { // If column was found, return false if it was matched, true if it was not // If column not found, return true if we filter if missing, false if not - return this.foundColumn? !this.matchedColumn: this.filterIfMissing; + return this.foundColumn ? 
!this.matchedColumn : this.filterIfMissing; } - - @Override - public boolean hasFilterRow() { + + @Override public boolean hasFilterRow() { return true; } - @Override - public void reset() { + @Override public void reset() { foundColumn = false; matchedColumn = false; } @@ -339,26 +324,23 @@ public class SingleColumnValueFilter extends FilterBase { this.latestVersionOnly = latestVersionOnly; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { Preconditions.checkArgument(filterArguments.size() == 4 || filterArguments.size() == 6, - "Expected 4 or 6 but got: %s", filterArguments.size()); - byte [] family = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); - byte [] qualifier = ParseFilter.removeQuotesFromByteArray(filterArguments.get(1)); + "Expected 4 or 6 but got: %s", filterArguments.size()); + byte[] family = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + byte[] qualifier = ParseFilter.removeQuotesFromByteArray(filterArguments.get(1)); CompareOperator op = ParseFilter.createCompareOperator(filterArguments.get(2)); - org.apache.hadoop.hbase.filter.ByteArrayComparable comparator = ParseFilter.createComparator( - ParseFilter.removeQuotesFromByteArray(filterArguments.get(3))); - - if (comparator instanceof RegexStringComparator || - comparator instanceof SubstringComparator) { - if (op != CompareOperator.EQUAL && - op != CompareOperator.NOT_EQUAL) { - throw new IllegalArgumentException ("A regexstring comparator and substring comparator " + - "can only be used with EQUAL and NOT_EQUAL"); + org.apache.hadoop.hbase.filter.ByteArrayComparable comparator = + ParseFilter.createComparator(ParseFilter.removeQuotesFromByteArray(filterArguments.get(3))); + + if (comparator instanceof RegexStringComparator || comparator instanceof SubstringComparator) { + if (op != CompareOperator.EQUAL && op != CompareOperator.NOT_EQUAL) { + throw new 
IllegalArgumentException("A regexstring comparator and substring comparator " + + "can only be used with EQUAL and NOT_EQUAL"); } } - SingleColumnValueFilter filter = new SingleColumnValueFilter(family, qualifier, - op, comparator); + SingleColumnValueFilter filter = new SingleColumnValueFilter(family, qualifier, op, comparator); if (filterArguments.size() == 6) { boolean filterIfMissing = ParseFilter.convertByteArrayToBoolean(filterArguments.get(4)); @@ -371,7 +353,7 @@ public class SingleColumnValueFilter extends FilterBase { FilterProtos.SingleColumnValueFilter convert() { FilterProtos.SingleColumnValueFilter.Builder builder = - FilterProtos.SingleColumnValueFilter.newBuilder(); + FilterProtos.SingleColumnValueFilter.newBuilder(); if (this.columnFamily != null) { builder.setColumnFamily(UnsafeByteOperations.unsafeWrap(this.columnFamily)); } @@ -390,8 +372,7 @@ public class SingleColumnValueFilter extends FilterBase { /** * @return The filter serialized using pb */ - @Override - public byte [] toByteArray() { + @Override public byte[] toByteArray() { return convert().toByteArray(); } @@ -401,8 +382,8 @@ public class SingleColumnValueFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static SingleColumnValueFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SingleColumnValueFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.SingleColumnValueFilter proto; try { proto = FilterProtos.SingleColumnValueFilter.parseFrom(pbBytes); @@ -410,8 +391,7 @@ public class SingleColumnValueFilter extends FilterBase { throw new DeserializationException(e); } - final CompareOperator compareOp = - CompareOperator.valueOf(proto.getCompareOp().name()); + final CompareOperator compareOp = CompareOperator.valueOf(proto.getCompareOp().name()); final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator; try { comparator = 
ProtobufUtil.toComparator(proto.getComparator()); @@ -419,28 +399,30 @@ public class SingleColumnValueFilter extends FilterBase { throw new DeserializationException(ioe); } - return new SingleColumnValueFilter(proto.hasColumnFamily() ? proto.getColumnFamily() - .toByteArray() : null, proto.hasColumnQualifier() ? proto.getColumnQualifier() - .toByteArray() : null, compareOp, comparator, proto.getFilterIfMissing(), proto - .getLatestVersionOnly()); + return new SingleColumnValueFilter( + proto.hasColumnFamily() ? proto.getColumnFamily().toByteArray() : null, + proto.hasColumnQualifier() ? proto.getColumnQualifier().toByteArray() : null, compareOp, + comparator, proto.getFilterIfMissing(), proto.getLatestVersionOnly()); } /** * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SingleColumnValueFilter)) return false; - - SingleColumnValueFilter other = (SingleColumnValueFilter)o; - return Bytes.equals(this.getFamily(), other.getFamily()) - && Bytes.equals(this.getQualifier(), other.getQualifier()) - && this.op.equals(other.op) - && this.getComparator().areSerializedFieldsEqual(other.getComparator()) - && this.getFilterIfMissing() == other.getFilterIfMissing() - && this.getLatestVersionOnly() == other.getLatestVersionOnly(); + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof SingleColumnValueFilter)) { + return false; + } + + SingleColumnValueFilter other = (SingleColumnValueFilter) o; + return Bytes.equals(this.getFamily(), other.getFamily()) && Bytes + .equals(this.getQualifier(), other.getQualifier()) && this.op.equals(other.op) && this + .getComparator().areSerializedFieldsEqual(other.getComparator()) + && this.getFilterIfMissing() == other.getFilterIfMissing() + && this.getLatestVersionOnly() == 
other.getLatestVersionOnly(); } /** @@ -448,16 +430,27 @@ public class SingleColumnValueFilter extends FilterBase { * column in whole scan. If filterIfMissing == false, all families are essential, * because of possibility of skipping the rows without any data in filtered CF. */ - @Override - public boolean isFamilyEssential(byte[] name) { + @Override public boolean isFamilyEssential(byte[] name) { return !this.filterIfMissing || Bytes.equals(name, this.columnFamily); } - @Override - public String toString() { - return String.format("%s (%s, %s, %s, %s)", - this.getClass().getSimpleName(), Bytes.toStringBinary(this.columnFamily), - Bytes.toStringBinary(this.columnQualifier), this.op.name(), - Bytes.toStringBinary(this.comparator.getValue())); + @Override public String toString() { + return String.format("%s (%s, %s, %s, %s)", this.getClass().getSimpleName(), + Bytes.toStringBinary(this.columnFamily), Bytes.toStringBinary(this.columnQualifier), + this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); + } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(SingleColumnValueFilter.class.isInstance(obj)))) { + return false; + } + SingleColumnValueFilter f = (SingleColumnValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects + .hash(Bytes.hashCode(this.getFamily()), Bytes.hashCode(this.getQualifier()), this.op, + this.getComparator(), this.getFilterIfMissing(), this.getLatestVersionOnly()); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index c710548bdb..125d1c06a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; +import java.util.Objects; import 
org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -100,7 +101,7 @@ public class SkipFilter extends FilterBase { public boolean filterRow() { return filterRow; } - + @Override public boolean hasFilterRow() { return true; @@ -111,8 +112,7 @@ public class SkipFilter extends FilterBase { */ @Override public byte[] toByteArray() throws IOException { - FilterProtos.SkipFilter.Builder builder = - FilterProtos.SkipFilter.newBuilder(); + FilterProtos.SkipFilter.Builder builder = FilterProtos.SkipFilter.newBuilder(); builder.setFilter(ProtobufUtil.toFilter(this.filter)); return builder.build().toByteArray(); } @@ -123,8 +123,7 @@ public class SkipFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static SkipFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SkipFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.SkipFilter proto; try { proto = FilterProtos.SkipFilter.parseFrom(pbBytes); @@ -145,10 +144,14 @@ public class SkipFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SkipFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof SkipFilter)) { + return false; + } - SkipFilter other = (SkipFilter)o; + SkipFilter other = (SkipFilter) o; return getFilter().areSerializedFieldsEqual(other.getFilter()); } @@ -161,4 +164,18 @@ public class SkipFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.filter.toString(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + SkipFilter f = (SkipFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.filter); + } } diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index b3a8eae499..6b25a975b9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; @@ -41,8 +42,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce * {@link org.apache.hadoop.hbase.client.Get#setTimestamp(long)}, * or {@link org.apache.hadoop.hbase.client.Scan#setTimestamp(long)}. */ -@InterfaceAudience.Public -public class TimestampsFilter extends FilterBase { +@InterfaceAudience.Public public class TimestampsFilter extends FilterBase { private final boolean canHint; TreeSet timestamps; @@ -97,26 +97,22 @@ public class TimestampsFilter extends FilterBase { /** * Gets the minimum timestamp requested by filter. - * @return minimum timestamp requested by filter. + * @return minimum timestamp requested by filter. */ public long getMin() { return minTimestamp; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { + @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. return false; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) { + @Override public ReturnCode filterCell(final Cell c) { if (this.timestamps.contains(c.getTimestamp())) { return ReturnCode.INCLUDE; } else if (c.getTimestamp() < minTimestamp) { @@ -127,7 +123,6 @@ public class TimestampsFilter extends FilterBase { return canHint ? 
ReturnCode.SEEK_NEXT_USING_HINT : ReturnCode.SKIP; } - /** * Pick the next cell that the scanner should seek to. Since this can skip any number of cells * any of which can be a delete this can resurect old data. @@ -136,8 +131,7 @@ public class TimestampsFilter extends FilterBase { * * @throws IOException This will never happen. */ - @Override - public Cell getNextCellHint(Cell currentCell) throws IOException { + @Override public Cell getNextCellHint(Cell currentCell) throws IOException { if (!canHint) { return null; } @@ -162,9 +156,9 @@ public class TimestampsFilter extends FilterBase { return PrivateCellUtil.createFirstOnRowColTS(currentCell, nextTimestamp); } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { ArrayList timestamps = new ArrayList<>(filterArguments.size()); - for (int i = 0; ibytes * @see #toByteArray */ - public static TimestampsFilter parseFrom(final byte[] pbBytes) - throws DeserializationException { + public static TimestampsFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.TimestampsFilter proto; try { proto = FilterProtos.TimestampsFilter.parseFrom(pbBytes); @@ -206,17 +197,19 @@ public class TimestampsFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof TimestampsFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof TimestampsFilter)) { + return false; + } - TimestampsFilter other = (TimestampsFilter)o; + TimestampsFilter other = (TimestampsFilter) o; return this.getTimestamps().equals(other.getTimestamps()); } - @Override - public String toString() { + @Override public String toString() { return toString(MAX_LOG_TIMESTAMPS); } @@ -235,7 +228,19 @@ public class TimestampsFilter extends FilterBase { } } - return String.format("%s (%d/%d): [%s] canHint: [%b]", this.getClass().getSimpleName(), - count, this.timestamps.size(), tsList.toString(), canHint); + return String.format("%s (%d/%d): [%s] canHint: [%b]", this.getClass().getSimpleName(), count, + this.timestamps.size(), tsList.toString(), canHint); + } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof TimestampsFilter))) { + return false; + } + TimestampsFilter f = (TimestampsFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.getTimestamps().toArray()); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 3faa111d11..e4c8a84c31 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -139,4 +139,18 @@ public class ValueFilter extends CompareFilter { 
return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ValueFilter))) { + return false; + } + ValueFilter f = (ValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index 6c1a47f215..e04a6ad518 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -36,8 +37,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce * {@link org.apache.hadoop.hbase.filter.Filter#filterAllRemaining()} methods * returns true. 
*/ -@InterfaceAudience.Public -public class WhileMatchFilter extends FilterBase { +@InterfaceAudience.Public public class WhileMatchFilter extends FilterBase { private boolean filterAllRemaining = false; private Filter filter; @@ -49,8 +49,7 @@ public class WhileMatchFilter extends FilterBase { return filter; } - @Override - public void reset() throws IOException { + @Override public void reset() throws IOException { this.filter.reset(); } @@ -58,63 +57,54 @@ public class WhileMatchFilter extends FilterBase { filterAllRemaining = filterAllRemaining || value; } - @Override - public boolean filterAllRemaining() throws IOException { + @Override public boolean filterAllRemaining() throws IOException { return this.filterAllRemaining || this.filter.filterAllRemaining(); } - @Override - public boolean filterRowKey(byte[] buffer, int offset, int length) throws IOException { + @Override public boolean filterRowKey(byte[] buffer, int offset, int length) throws IOException { boolean value = filter.filterRowKey(buffer, offset, length); changeFAR(value); return value; } - @Override - public boolean filterRowKey(Cell cell) throws IOException { - if (filterAllRemaining()) return true; + @Override public boolean filterRowKey(Cell cell) throws IOException { + if (filterAllRemaining()) { + return true; + } boolean value = filter.filterRowKey(cell); changeFAR(value); return value; } - @Deprecated - @Override - public ReturnCode filterKeyValue(final Cell c) throws IOException { + @Deprecated @Override public ReturnCode filterKeyValue(final Cell c) throws IOException { return filterCell(c); } - @Override - public ReturnCode filterCell(final Cell c) throws IOException { + @Override public ReturnCode filterCell(final Cell c) throws IOException { ReturnCode code = filter.filterCell(c); changeFAR(code != ReturnCode.INCLUDE); return code; } - @Override - public Cell transformCell(Cell v) throws IOException { + @Override public Cell transformCell(Cell v) throws IOException { return 
filter.transformCell(v); } - @Override - public boolean filterRow() throws IOException { + @Override public boolean filterRow() throws IOException { boolean filterRow = this.filter.filterRow(); changeFAR(filterRow); return filterRow; } - - @Override - public boolean hasFilterRow() { + + @Override public boolean hasFilterRow() { return true; } /** * @return The filter serialized using pb */ - @Override - public byte[] toByteArray() throws IOException { - FilterProtos.WhileMatchFilter.Builder builder = - FilterProtos.WhileMatchFilter.newBuilder(); + @Override public byte[] toByteArray() throws IOException { + FilterProtos.WhileMatchFilter.Builder builder = FilterProtos.WhileMatchFilter.newBuilder(); builder.setFilter(ProtobufUtil.toFilter(this.filter)); return builder.build().toByteArray(); } @@ -125,8 +115,7 @@ public class WhileMatchFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static WhileMatchFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static WhileMatchFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.WhileMatchFilter proto; try { proto = FilterProtos.WhileMatchFilter.parseFrom(pbBytes); @@ -145,22 +134,35 @@ public class WhileMatchFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ - @Override - boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof WhileMatchFilter)) return false; + @Override boolean areSerializedFieldsEqual(Filter o) { + if (o == this) { + return true; + } + if (!(o instanceof WhileMatchFilter)) { + return false; + } - WhileMatchFilter other = (WhileMatchFilter)o; + WhileMatchFilter other = (WhileMatchFilter) o; return getFilter().areSerializedFieldsEqual(other.getFilter()); } - @Override - public boolean isFamilyEssential(byte[] name) throws IOException { + @Override public boolean isFamilyEssential(byte[] name) throws IOException { return filter.isFamilyEssential(name); } - @Override - public String toString() { + @Override public String toString() { return this.getClass().getSimpleName() + " " + this.filter.toString(); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + WhileMatchFilter f = (WhileMatchFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override public int hashCode() { + return Objects.hash(this.filter); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java index 24f750af04..c95f7bcc90 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.security.access; import java.io.IOException; import java.util.Map; - +import java.util.Objects; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -173,4 +173,25 @@ class AccessControlFilter extends FilterBase { throw new UnsupportedOperationException( "Serialization not supported. 
Intended for server-side use only."); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if(this == obj){ + return true; + } + AccessControlFilter f=(AccessControlFilter)obj; + return this.authManager.equals(f.authManager) + &&this.table.equals(f.table) + &&this.user.equals(f.user) + &&this.strategy.equals(f.strategy) + &&this.cfVsMaxVersions.equals(f.cfVsMaxVersions); + } + + @Override + public int hashCode() { + return Objects.hash(this.authManager,this.table,this.strategy,this.user,this.cfVsMaxVersions); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 4f00e7daa3..0ebaf09c46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -1038,6 +1038,23 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso deleteCellVisTagsFormat); return matchFound ? 
ReturnCode.INCLUDE : ReturnCode.SKIP; } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if(this == obj){ + return true; + } + DeleteVersionVisibilityExpressionFilter f=(DeleteVersionVisibilityExpressionFilter)obj; + return this.deleteCellVisTags.equals(f.deleteCellVisTags) + &&this.deleteCellVisTagsFormat.equals(f.deleteCellVisTagsFormat); + } + + @Override + public int hashCode() { + return Objects.hash(this.deleteCellVisTags,this.deleteCellVisTagsFormat); + } } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java index 2bde9b59b1..d7eb6e5ac3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; import java.util.Map; - +import java.util.Objects; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -90,4 +90,21 @@ class VisibilityLabelFilter extends FilterBase { this.curFamilyMaxVersions = 0; this.curQualMetVersions = 0; } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if(this == obj){ + return true; + } + VisibilityLabelFilter f=(VisibilityLabelFilter)obj; + return this.expEvaluator.equals(f.expEvaluator) + &&this.cfVsMaxVersions.equals(f.cfVsMaxVersions); + } + + @Override + public int hashCode() { + return Objects.hash(this.expEvaluator,this.cfVsMaxVersions); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java index 5bede2ad11..5735a981c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -55,4 +56,21 @@ public final class ColumnCountOnRowFilter extends FilterBase { public static ColumnCountOnRowFilter parseFrom(byte[] bytes) throws DeserializationException { return new ColumnCountOnRowFilter(Bytes.toInt(bytes)); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + ColumnCountOnRowFilter f = (ColumnCountOnRowFilter) obj; + return this.limit == f.limit; + } + + @Override + public int hashCode() { + return Objects.hash(this.limit); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java index 40628db3e3..e2a70b23b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java @@ -54,11 +54,14 @@ public class FilterAllFilter extends FilterBase { @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) + if (o == this) { return true; - if (!(o instanceof FilterAllFilter)) + } + if (!(o instanceof FilterAllFilter)) { return false; + } return true; } + } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index a3e3359c1d..e6d73fb33e 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; + import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 5f9515ac4d..42b65fad23 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -22,8 +22,8 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; - import java.io.IOException; +import java.util.Objects; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -988,6 +988,22 @@ public class TestFilterList { public boolean getTransformed() { return this.transformed; } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if(this==obj){ + return true; + } + TransformFilter f=(TransformFilter)obj; + return this.targetRetCode.equals(f.targetRetCode); + } + + @Override + public int hashCode() { + return Objects.hash(this.targetRetCode); + } } @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java index 815643d441..b53d40c9a6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.concurrent.ThreadLocalRandom; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -52,8 +53,7 @@ import org.junit.experimental.categories.Category; public class TestSwitchToStreamRead { @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSwitchToStreamRead.class); + public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestSwitchToStreamRead.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @@ -76,19 +76,15 @@ public class TestSwitchToStreamRead { } VALUE_PREFIX = sb.append("-").toString(); REGION = UTIL.createLocalHRegion( - TableDescriptorBuilder.newBuilder(TABLE_NAME) - .setColumnFamily( - ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBlocksize(1024).build()) - .build(), - null, null); + TableDescriptorBuilder.newBuilder(TABLE_NAME) + .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBlocksize(1024).build()).build(), + null, null); for (int i = 0; i < 900; i++) { - REGION - .put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); + REGION.put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); } REGION.flush(true); for (int i = 900; i < 1000; i++) { - REGION - .put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); + REGION.put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); } } @@ -101,8 +97,7 @@ public class TestSwitchToStreamRead { @Test public void test() throws IOException { try (RegionScannerImpl scanner = REGION.getScanner(new Scan())) { - StoreScanner storeScanner = - 
(StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); + StoreScanner storeScanner = (StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); for (KeyValueScanner kvs : storeScanner.getAllScannersForTesting()) { if (kvs instanceof StoreFileScanner) { StoreFileScanner sfScanner = (StoreFileScanner) kvs; @@ -145,12 +140,24 @@ public class TestSwitchToStreamRead { public boolean filterRowKey(Cell cell) throws IOException { return Bytes.toInt(cell.getRowArray(), cell.getRowOffset()) != 999; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + return this == obj; + } + + @Override + public int hashCode() { + return Objects.hash(MatchLastRowKeyFilter.class.getName()); + } } private void testFilter(Filter filter) throws IOException { try (RegionScannerImpl scanner = REGION.getScanner(new Scan().setFilter(filter))) { - StoreScanner storeScanner = - (StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); + StoreScanner storeScanner = (StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); for (KeyValueScanner kvs : storeScanner.getAllScannersForTesting()) { if (kvs instanceof StoreFileScanner) { StoreFileScanner sfScanner = (StoreFileScanner) kvs; @@ -160,8 +167,7 @@ public class TestSwitchToStreamRead { } List cells = new ArrayList<>(); // should return before finishing the scan as we want to switch from pread to stream - assertTrue(scanner.next(cells, - ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); + assertTrue(scanner.next(cells, ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); assertTrue(cells.isEmpty()); scanner.shipped(); @@ -172,8 +178,7 @@ public class TestSwitchToStreamRead { assertFalse(sfScanner.getReader().shared); } } - assertFalse(scanner.next(cells, - ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); + 
assertFalse(scanner.next(cells, ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); Result result = Result.create(cells); assertEquals(VALUE_PREFIX + 999, Bytes.toString(result.getValue(FAMILY, QUAL))); cells.clear(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java index 861b83e2fe..c571020ad3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; + import java.io.IOException; import java.util.ArrayList; import java.util.List; diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java index 694fb6ab8c..3fdc14b60f 100644 --- a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java +++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java @@ -21,9 +21,11 @@ import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -46,15 +48,13 @@ import scala.collection.mutable.MutableList; * by SparkSQL so that we have make the filters at the region server level * and avoid sending the data back to the client to be filtered. 
*/ -@InterfaceAudience.Private -public class SparkSQLPushDownFilter extends FilterBase{ +@InterfaceAudience.Private public class SparkSQLPushDownFilter extends FilterBase { protected static final Logger log = LoggerFactory.getLogger(SparkSQLPushDownFilter.class); //The following values are populated with protobuffer DynamicLogicExpression dynamicLogicExpression; byte[][] valueFromQueryArray; - HashMap> - currentCellToColumnIndexMap; + HashMap> currentCellToColumnIndexMap; //The following values are transient HashMap columnToCurrentRowValueMap = null; @@ -65,10 +65,9 @@ public class SparkSQLPushDownFilter extends FilterBase{ String encoderClassName; public SparkSQLPushDownFilter(DynamicLogicExpression dynamicLogicExpression, - byte[][] valueFromQueryArray, - HashMap> - currentCellToColumnIndexMap, String encoderClassName) { + byte[][] valueFromQueryArray, + HashMap> currentCellToColumnIndexMap, + String encoderClassName) { this.dynamicLogicExpression = dynamicLogicExpression; this.valueFromQueryArray = valueFromQueryArray; this.currentCellToColumnIndexMap = currentCellToColumnIndexMap; @@ -76,15 +75,13 @@ public class SparkSQLPushDownFilter extends FilterBase{ } public SparkSQLPushDownFilter(DynamicLogicExpression dynamicLogicExpression, - byte[][] valueFromQueryArray, - MutableList fields, String encoderClassName) { + byte[][] valueFromQueryArray, MutableList fields, String encoderClassName) { this.dynamicLogicExpression = dynamicLogicExpression; this.valueFromQueryArray = valueFromQueryArray; this.encoderClassName = encoderClassName; //generate family qualifier to index mapping - this.currentCellToColumnIndexMap = - new HashMap<>(); + this.currentCellToColumnIndexMap = new HashMap<>(); for (int i = 0; i < fields.size(); i++) { Field field = fields.apply(i); @@ -94,7 +91,7 @@ public class SparkSQLPushDownFilter extends FilterBase{ new ByteArrayComparable(cfBytes, 0, cfBytes.length); HashMap qualifierIndexMap = - 
currentCellToColumnIndexMap.get(familyByteComparable); + currentCellToColumnIndexMap.get(familyByteComparable); if (qualifierIndexMap == null) { qualifierIndexMap = new HashMap<>(); @@ -108,67 +105,53 @@ public class SparkSQLPushDownFilter extends FilterBase{ } } - @Override - public ReturnCode filterCell(final Cell c) throws IOException { + @Override public ReturnCode filterCell(final Cell c) throws IOException { //If the map RowValueMap is empty then we need to populate // the row key if (columnToCurrentRowValueMap == null) { columnToCurrentRowValueMap = new HashMap<>(); - HashMap qualifierColumnMap = - currentCellToColumnIndexMap.get( - new ByteArrayComparable(rowKeyFamily, 0, rowKeyFamily.length)); + HashMap qualifierColumnMap = currentCellToColumnIndexMap + .get(new ByteArrayComparable(rowKeyFamily, 0, rowKeyFamily.length)); if (qualifierColumnMap != null) { - String rowKeyColumnName = - qualifierColumnMap.get( - new ByteArrayComparable(rowKeyQualifier, 0, - rowKeyQualifier.length)); + String rowKeyColumnName = qualifierColumnMap + .get(new ByteArrayComparable(rowKeyQualifier, 0, rowKeyQualifier.length)); //Make sure that the rowKey is part of the where clause if (rowKeyColumnName != null) { columnToCurrentRowValueMap.put(rowKeyColumnName, - new ByteArrayComparable(c.getRowArray(), - c.getRowOffset(), c.getRowLength())); + new ByteArrayComparable(c.getRowArray(), c.getRowOffset(), c.getRowLength())); } } } //Always populate the column value into the RowValueMap ByteArrayComparable currentFamilyByteComparable = - new ByteArrayComparable(c.getFamilyArray(), - c.getFamilyOffset(), - c.getFamilyLength()); + new ByteArrayComparable(c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength()); HashMap qualifierColumnMap = - currentCellToColumnIndexMap.get( - currentFamilyByteComparable); + currentCellToColumnIndexMap.get(currentFamilyByteComparable); if (qualifierColumnMap != null) { - String columnName = - qualifierColumnMap.get( - new 
ByteArrayComparable(c.getQualifierArray(), - c.getQualifierOffset(), - c.getQualifierLength())); + String columnName = qualifierColumnMap.get( + new ByteArrayComparable(c.getQualifierArray(), c.getQualifierOffset(), + c.getQualifierLength())); if (columnName != null) { columnToCurrentRowValueMap.put(columnName, - new ByteArrayComparable(c.getValueArray(), - c.getValueOffset(), c.getValueLength())); + new ByteArrayComparable(c.getValueArray(), c.getValueOffset(), c.getValueLength())); } } return ReturnCode.INCLUDE; } - - @Override - public boolean filterRow() throws IOException { + @Override public boolean filterRow() throws IOException { try { boolean result = - dynamicLogicExpression.execute(columnToCurrentRowValueMap, - valueFromQueryArray); + dynamicLogicExpression.execute(columnToCurrentRowValueMap, valueFromQueryArray); columnToCurrentRowValueMap = null; return !result; } catch (Throwable e) { @@ -177,15 +160,13 @@ public class SparkSQLPushDownFilter extends FilterBase{ return false; } - /** * @param pbBytes A pb serialized instance * @return An instance of SparkSQLPushDownFilter * @throws DeserializationException if the filter cannot be parsed from the given bytes */ - @SuppressWarnings("unused") - public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) - throws DeserializationException { + @SuppressWarnings("unused") public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { SparkFilterProtos.SQLPredicatePushDownFilter proto; try { @@ -199,7 +180,7 @@ public class SparkSQLPushDownFilter extends FilterBase{ //Load DynamicLogicExpression DynamicLogicExpression dynamicLogicExpression = - DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc); + DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc); //Load valuesFromQuery final List valueFromQueryArrayList = proto.getValueFromQueryArrayList(); @@ -209,36 +190,33 @@ public class SparkSQLPushDownFilter extends 
FilterBase{ } //Load mapping from HBase family/qualifier to Spark SQL columnName - HashMap> - currentCellToColumnIndexMap = new HashMap<>(); + HashMap> currentCellToColumnIndexMap = + new HashMap<>(); - for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping - sqlPredicatePushDownCellToColumnMapping : - proto.getCellToColumnMappingList()) { + for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping sqlPredicatePushDownCellToColumnMapping : proto + .getCellToColumnMappingList()) { - byte[] familyArray = - sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray(); + byte[] familyArray = sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray(); ByteArrayComparable familyByteComparable = - new ByteArrayComparable(familyArray, 0, familyArray.length); + new ByteArrayComparable(familyArray, 0, familyArray.length); HashMap qualifierMap = - currentCellToColumnIndexMap.get(familyByteComparable); + currentCellToColumnIndexMap.get(familyByteComparable); if (qualifierMap == null) { qualifierMap = new HashMap<>(); currentCellToColumnIndexMap.put(familyByteComparable, qualifierMap); } - byte[] qualifierArray = - sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray(); + byte[] qualifierArray = sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray(); ByteArrayComparable qualifierByteComparable = - new ByteArrayComparable(qualifierArray, 0 ,qualifierArray.length); + new ByteArrayComparable(qualifierArray, 0, qualifierArray.length); - qualifierMap.put(qualifierByteComparable, - sqlPredicatePushDownCellToColumnMapping.getColumnName()); + qualifierMap + .put(qualifierByteComparable, sqlPredicatePushDownCellToColumnMapping.getColumnName()); } - return new SparkSQLPushDownFilter(dynamicLogicExpression, - valueFromQueryArray, currentCellToColumnIndexMap, encoder); + return new SparkSQLPushDownFilter(dynamicLogicExpression, valueFromQueryArray, + currentCellToColumnIndexMap, encoder); } /** @@ -247,31 +225,56 @@ public 
class SparkSQLPushDownFilter extends FilterBase{ public byte[] toByteArray() { SparkFilterProtos.SQLPredicatePushDownFilter.Builder builder = - SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder(); + SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder(); SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder columnMappingBuilder = - SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder(); + SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder(); builder.setDynamicLogicExpression(dynamicLogicExpression.toExpressionString()); - for (byte[] valueFromQuery: valueFromQueryArray) { + for (byte[] valueFromQuery : valueFromQueryArray) { builder.addValueFromQueryArray(ByteStringer.wrap(valueFromQuery)); } - for (Map.Entry> - familyEntry : currentCellToColumnIndexMap.entrySet()) { - for (Map.Entry qualifierEntry : - familyEntry.getValue().entrySet()) { - columnMappingBuilder.setColumnFamily( - ByteStringer.wrap(familyEntry.getKey().bytes())); - columnMappingBuilder.setQualifier( - ByteStringer.wrap(qualifierEntry.getKey().bytes())); + for (Map.Entry> familyEntry : currentCellToColumnIndexMap + .entrySet()) { + for (Map.Entry qualifierEntry : familyEntry.getValue() + .entrySet()) { + columnMappingBuilder.setColumnFamily(ByteStringer.wrap(familyEntry.getKey().bytes())); + columnMappingBuilder.setQualifier(ByteStringer.wrap(qualifierEntry.getKey().bytes())); columnMappingBuilder.setColumnName(qualifierEntry.getValue()); builder.addCellToColumnMapping(columnMappingBuilder.build()); } } builder.setEncoderClassName(encoderClassName); - return builder.build().toByteArray(); } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof SparkSQLPushDownFilter))) { + return false; + } + if (this == obj) { + return true; + } + SparkSQLPushDownFilter f = (SparkSQLPushDownFilter) obj; + if (this.valueFromQueryArray.length != f.valueFromQueryArray.length) { + return false; + } + int i = 0; + for (byte[] val : 
this.valueFromQueryArray) { + if (!Bytes.equals(val, f.valueFromQueryArray[i])) { + return false; + } + i++; + } + return this.dynamicLogicExpression.toExpressionString() + .equals(f.dynamicLogicExpression.toExpressionString()) && this.currentCellToColumnIndexMap + .equals(f.currentCellToColumnIndexMap) && this.encoderClassName.equals(f.encoderClassName); + } + + @Override public int hashCode() { + return Objects.hash(this.dynamicLogicExpression, Arrays.hashCode(this.valueFromQueryArray), + this.currentCellToColumnIndexMap, this.encoderClassName); + } }