diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index 3aaac36268..983ac533b1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -1,40 +1,31 @@ /* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** - * Simple filter that returns first N columns on row only. - * This filter was written to test filters in Get and as soon as it gets - * its quota of columns, {@link #filterAllRemaining()} returns true. This - * makes this filter unsuitable as a Scan filter. + * Simple filter that returns first N columns on row only. This filter was written to test filters + * in Get and as soon as it gets its quota of columns, {@link #filterAllRemaining()} returns true. + * This makes this filter unsuitable as a Scan filter. */ @InterfaceAudience.Public public class ColumnCountGetFilter extends FilterBase { @@ -53,7 +44,9 @@ public class ColumnCountGetFilter extends FilterBase { @Override public boolean filterRowKey(Cell cell) throws IOException { // Impl in FilterBase might do unnecessary copy for Off heap backed Cells. 
- if (filterAllRemaining()) return true; + if (filterAllRemaining()) { + return true; + } return false; } @@ -79,9 +72,9 @@ public class ColumnCountGetFilter extends FilterBase { this.count = 0; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { - Preconditions.checkArgument(filterArguments.size() == 1, - "Expected 1 but got: %s", filterArguments.size()); + public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) { + Preconditions.checkArgument(filterArguments.size() == 1, "Expected 1 but got: %s", + filterArguments.size()); int limit = ParseFilter.convertByteArrayToInt(filterArguments.get(0)); return new ColumnCountGetFilter(limit); } @@ -90,9 +83,9 @@ public class ColumnCountGetFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { + public byte[] toByteArray() { FilterProtos.ColumnCountGetFilter.Builder builder = - FilterProtos.ColumnCountGetFilter.newBuilder(); + FilterProtos.ColumnCountGetFilter.newBuilder(); builder.setLimit(this.limit); return builder.build().toByteArray(); } @@ -103,8 +96,8 @@ public class ColumnCountGetFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static ColumnCountGetFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnCountGetFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.ColumnCountGetFilter proto; try { proto = FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes); @@ -116,15 +109,19 @@ public class ColumnCountGetFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. 
*/ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof ColumnCountGetFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof ColumnCountGetFilter)) { + return false; + } - ColumnCountGetFilter other = (ColumnCountGetFilter)o; + ColumnCountGetFilter other = (ColumnCountGetFilter) o; return this.getLimit() == other.getLimit(); } @@ -132,4 +129,19 @@ public class ColumnCountGetFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.limit; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnCountGetFilter))) { + return false; + } + ColumnCountGetFilter f = (ColumnCountGetFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.limit); + } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index c90047da8b..4f592e98ac 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -230,4 +231,18 @@ public class ColumnPaginationFilter extends FilterBase { return String.format("%s (%d, %d)", this.getClass().getSimpleName(), this.limit, this.offset); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnPaginationFilter))) { + return false; + } + ColumnPaginationFilter f = (ColumnPaginationFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.limit, this.offset); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index a403a40425..9892018f2f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; @@ -42,9 +43,9 @@ import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; */ @InterfaceAudience.Public public class ColumnPrefixFilter extends FilterBase { - protected byte [] prefix = null; + protected byte[] prefix = null; - public ColumnPrefixFilter(final byte [] prefix) { + public ColumnPrefixFilter(final byte[] prefix) { this.prefix = prefix; } @@ -94,19 +95,26 @@ public class ColumnPrefixFilter extends FilterBase { } } - private static int compareQualifierPart(Cell cell, int length, byte[] prefix) { + private static int compareQualifierPart(Cell cell, int length, + byte[] prefix) { if (cell instanceof ByteBufferExtendedCell) { - return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) cell).getQualifierByteBuffer(), - ((ByteBufferExtendedCell) cell).getQualifierPosition(), length, prefix, 0, length); + return ByteBufferUtils.compareTo( + 
((ByteBufferExtendedCell) cell).getQualifierByteBuffer(), + ((ByteBufferExtendedCell) cell).getQualifierPosition(), length, + prefix, 0, length); } - return Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), length, prefix, 0, - length); + return Bytes + .compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), + length, prefix, 0, length); } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { + public static Filter createFilterFromArguments( + ArrayList<byte[]> filterArguments) { Preconditions.checkArgument(filterArguments.size() == 1, - "Expected 1 but got: %s", filterArguments.size()); - byte [] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + "Expected 1 but got: %s", + filterArguments.size()); + byte[] columnPrefix = + ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); return new ColumnPrefixFilter(columnPrefix); } @@ -114,10 +122,12 @@ public class ColumnPrefixFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { + public byte[] toByteArray() { FilterProtos.ColumnPrefixFilter.Builder builder = - FilterProtos.ColumnPrefixFilter.newBuilder(); - if (this.prefix != null) builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix)); + FilterProtos.ColumnPrefixFilter.newBuilder(); + if (this.prefix != null) { + builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix)); + } return builder.build().toByteArray(); } @@ -127,8 +137,8 @@ public class ColumnPrefixFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static ColumnPrefixFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnPrefixFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.ColumnPrefixFilter proto; try { proto = FilterProtos.ColumnPrefixFilter.parseFrom(pbBytes); @@ -145,10 +155,14 @@ public class ColumnPrefixFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof ColumnPrefixFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof ColumnPrefixFilter)) { + return false; + } - ColumnPrefixFilter other = (ColumnPrefixFilter)o; + ColumnPrefixFilter other = (ColumnPrefixFilter) o; return Bytes.equals(this.getPrefix(), other.getPrefix()); } @@ -159,6 +173,21 @@ public class ColumnPrefixFilter extends FilterBase { @Override public String toString() { - return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix); + return this.getClass().getSimpleName() + " " + Bytes + .toStringBinary(this.prefix); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnPrefixFilter))) { + return false; + } + ColumnPrefixFilter f = (ColumnPrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getPrefix())); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index f981ed75e4..1643beb88c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -1,20 +1,12 @@ /* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or
more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; @@ -23,30 +15,27 @@ import static org.apache.hadoop.hbase.util.Bytes.len; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; -import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; +import org.apache.yetus.audience.InterfaceAudience; /** - * This filter is used for selecting only those keys with columns that are - * between minColumn to maxColumn. For example, if minColumn is 'an', and - * maxColumn is 'be', it will pass keys with columns like 'ana', 'bad', but not - * keys with columns like 'bed', 'eye' - * - * If minColumn is null, there is no lower bound. If maxColumn is null, there is - * no upper bound. - * - * minColumnInclusive and maxColumnInclusive specify if the ranges are inclusive - * or not. + * This filter is used for selecting only those keys with columns that are between minColumn to + * maxColumn. For example, if minColumn is 'an', and maxColumn is 'be', it will pass keys with + * columns like 'ana', 'bad', but not keys with columns like 'bed', 'eye' + *
<p>
+ * If minColumn is null, there is no lower bound. If maxColumn is null, there is no upper bound. + *
<p>
+ * minColumnInclusive and maxColumnInclusive specify if the ranges are inclusive or not. */ @InterfaceAudience.Public public class ColumnRangeFilter extends FilterBase { @@ -56,14 +45,11 @@ public class ColumnRangeFilter extends FilterBase { protected boolean maxColumnInclusive = false; /** - * Create a filter to select those keys with columns that are between minColumn - * and maxColumn. - * @param minColumn minimum value for the column range. If if it's null, - * there is no lower bound. + * Create a filter to select those keys with columns that are between minColumn and maxColumn. + * @param minColumn minimum value for the column range. If it's null, there is no lower bound. * @param minColumnInclusive if true, include minColumn in the range. - * @param maxColumn maximum value for the column range. If it's null, - * @param maxColumnInclusive if true, include maxColumn in the range. - * there is no upper bound. + * @param maxColumn maximum value for the column range. If it's null, there is no upper bound. + * @param maxColumnInclusive if true, include maxColumn in the range. */ public ColumnRangeFilter(final byte[] minColumn, boolean minColumnInclusive, final byte[] maxColumn, boolean maxColumnInclusive) { @@ -156,34 +142,32 @@ public class ColumnRangeFilter extends FilterBase { return ReturnCode.NEXT_ROW; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { - Preconditions.checkArgument(filterArguments.size() == 4, - "Expected 4 but got: %s", filterArguments.size()); - byte [] minColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) { + Preconditions.checkArgument(filterArguments.size() == 4, "Expected 4 but got: %s", + filterArguments.size()); + byte[] minColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); boolean minColumnInclusive = ParseFilter.convertByteArrayToBoolean(filterArguments.get(1)); - byte [] maxColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(2)); + byte[] maxColumn = ParseFilter.removeQuotesFromByteArray(filterArguments.get(2)); boolean maxColumnInclusive = ParseFilter.convertByteArrayToBoolean(filterArguments.get(3)); - if (minColumn.length == 0) - minColumn = null; - if (maxColumn.length == 0) - maxColumn = null; - return new ColumnRangeFilter(minColumn, minColumnInclusive, - maxColumn, maxColumnInclusive); + if (minColumn.length == 0) minColumn = null; + if (maxColumn.length == 0) maxColumn = null; + return new ColumnRangeFilter(minColumn, minColumnInclusive, maxColumn, maxColumnInclusive); } /** * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.ColumnRangeFilter.Builder builder = - FilterProtos.ColumnRangeFilter.newBuilder(); - if (this.minColumn != null) builder.setMinColumn( - UnsafeByteOperations.unsafeWrap(this.minColumn)); + public byte[] toByteArray() { + FilterProtos.ColumnRangeFilter.Builder builder = FilterProtos.ColumnRangeFilter.newBuilder(); + if (this.minColumn != null) { + builder.setMinColumn(UnsafeByteOperations.unsafeWrap(this.minColumn)); + } builder.setMinColumnInclusive(this.minColumnInclusive); - if (this.maxColumn != null) builder.setMaxColumn( - UnsafeByteOperations.unsafeWrap(this.maxColumn)); + if (this.maxColumn != null) { + builder.setMaxColumn(UnsafeByteOperations.unsafeWrap(this.maxColumn)); + } builder.setMaxColumnInclusive(this.maxColumnInclusive); return builder.build().toByteArray(); } @@ -194,17 +178,17 @@ public class ColumnRangeFilter extends FilterBase { * @throws DeserializationException * 
@see #toByteArray */ - public static ColumnRangeFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static ColumnRangeFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.ColumnRangeFilter proto; try { proto = FilterProtos.ColumnRangeFilter.parseFrom(pbBytes); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } - return new ColumnRangeFilter(proto.hasMinColumn()?proto.getMinColumn().toByteArray():null, - proto.getMinColumnInclusive(),proto.hasMaxColumn()?proto.getMaxColumn().toByteArray():null, - proto.getMaxColumnInclusive()); + return new ColumnRangeFilter(proto.hasMinColumn() ? proto.getMinColumn().toByteArray() : null, + proto.getMinColumnInclusive(), + proto.hasMaxColumn() ? proto.getMaxColumn().toByteArray() : null, + proto.getMaxColumnInclusive()); } /** @@ -234,9 +218,23 @@ public class ColumnRangeFilter extends FilterBase { @Override public String toString() { - return this.getClass().getSimpleName() + " " - + (this.minColumnInclusive ? "[" : "(") + Bytes.toStringBinary(this.minColumn) - + ", " + Bytes.toStringBinary(this.maxColumn) + return this.getClass().getSimpleName() + " " + (this.minColumnInclusive ? "[" : "(") + + Bytes.toStringBinary(this.minColumn) + ", " + Bytes.toStringBinary(this.maxColumn) + (this.maxColumnInclusive ? "]" : ")"); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnRangeFilter))) { + return false; + } + ColumnRangeFilter f = (ColumnRangeFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.toStringBinary(this.getMinColumn()), this.getMinColumnInclusive(), + Bytes.toStringBinary(this.getMaxColumn()), this.getMaxColumnInclusive()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java index 07951658f8..9e61dba819 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -238,4 +239,18 @@ public class ColumnValueFilter extends FilterBase { Bytes.toStringBinary(this.qualifier), this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ColumnValueFilter))) { + return false; + } + ColumnValueFilter f = (ColumnValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getFamily()), Bytes.hashCode(this.getQualifier()), this.getCompareOperator(), this.getComparator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index 50924ec631..73c60fd646 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import 
org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; @@ -321,4 +322,18 @@ public abstract class CompareFilter extends FilterBase { this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof CompareFilter))) { + return false; + } + CompareFilter f = (CompareFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index 8f5dee7924..7d8dd60d8d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; import org.apache.hadoop.hbase.Cell; @@ -311,4 +312,18 @@ public class DependentColumnFilter extends CompareFilter { this.op.name(), this.comparator != null ? Bytes.toStringBinary(this.comparator.getValue()) : "null"); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof DependentColumnFilter))) { + return false; + } + DependentColumnFilter f = (DependentColumnFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getFamily()), Bytes.hashCode(this.getQualifier()), + this.dropDependentColumn()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index f114e98b7d..57279f0223 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -146,4 +146,18 @@ public class FamilyFilter extends CompareFilter { FamilyFilter other = (FamilyFilter)o; return super.areSerializedFieldsEqual(other); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FamilyFilter))) { + return false; + } + FamilyFilter f = (FamilyFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index c549eabb6c..f8be2f6db9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -23,6 +23,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -276,4 +277,17 @@ final public class FilterList 
extends FilterBase { public String toString() { return this.filterListBase.toString(); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterList))) { + return false; + } + FilterList f = (FilterList) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getOperator(), Arrays.hashCode(this.getFilters().toArray())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java index ebff3a5d73..b9fe00ab79 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java @@ -24,8 +24,10 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * FilterListWithAND represents an ordered list of filters which will be evaluated with an AND @@ -279,4 +281,21 @@ public class FilterListWithAND extends FilterListBase { } return maxHint; } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterListWithAND))) { + return false; + } + if (this == obj) { + return true; + } + FilterListWithAND f = (FilterListWithAND) obj; + return this.filters.equals(f.getFilters()) + && this.seekHintFilters.equals(f.seekHintFilters); + } + + @Override + public int hashCode() { + return Objects.hash(this.seekHintFilters, Arrays.hashCode(this.filters.toArray())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java index 064dd8387b..991563e13c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java @@ -26,8 +26,10 @@ import org.apache.yetus.audience.InterfaceAudience; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * FilterListWithOR represents an ordered list of filters which will be evaluated with an OR @@ -391,4 +393,26 @@ public class FilterListWithOR extends FilterListBase { } return minKeyHint; } + + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FilterListWithOR))) { + return false; + } + if (this == obj) { + return true; + } + FilterListWithOR f = (FilterListWithOR) obj; + return this.filters.equals(f.getFilters()) + && this.prevFilterRCList.equals(f.prevFilterRCList) + && this.prevCellList.equals(f.prevCellList); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(this.prevFilterRCList.toArray()), + Arrays.hashCode(this.prevCellList.toArray()), + Arrays.hashCode(this.filters.toArray())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index a6b990f4b7..f9691bef97 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -1,20 +1,12 @@ /* - * - * Licensed to the Apache Software Foundation 
(ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; @@ -22,12 +14,11 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** * A filter that will only return the first KV from each row. @@ -60,14 +51,16 @@ public class FirstKeyOnlyFilter extends FilterBase { @Override public ReturnCode filterCell(final Cell c) { - if(foundKV) return ReturnCode.NEXT_ROW; + if (foundKV) { + return ReturnCode.NEXT_ROW; + } foundKV = true; return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { - Preconditions.checkArgument(filterArguments.isEmpty(), - "Expected 0 but got: %s", filterArguments.size()); + public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) { + Preconditions.checkArgument(filterArguments.isEmpty(), "Expected 0 but got: %s", + filterArguments.size()); return new FirstKeyOnlyFilter(); } @@ -79,7 +72,6 @@ public class FirstKeyOnlyFilter extends FilterBase { } /** - * * @param value update {@link #foundKV} flag with value. 
*/ protected void setFoundKV(boolean value) { @@ -90,9 +82,8 @@ public class FirstKeyOnlyFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.FirstKeyOnlyFilter.Builder builder = - FilterProtos.FirstKeyOnlyFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.FirstKeyOnlyFilter.Builder builder = FilterProtos.FirstKeyOnlyFilter.newBuilder(); return builder.build().toByteArray(); } @@ -102,9 +93,8 @@ public class FirstKeyOnlyFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static FirstKeyOnlyFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { - // There is nothing to deserialize. Why do this at all? + public static FirstKeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException { + // There is nothing to deserialize. Why do this at all? try { FilterProtos.FirstKeyOnlyFilter.parseFrom(pbBytes); } catch (InvalidProtocolBufferException e) { @@ -116,14 +106,17 @@ public class FirstKeyOnlyFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof FirstKeyOnlyFilter)) return false; - + if (o == this) { + return true; + } + if (!(o instanceof FirstKeyOnlyFilter)) { + return false; + } return true; } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index 88dc36a740..4775643fb3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.filter; +import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -133,4 +134,18 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { FirstKeyValueMatchingQualifiersFilter other = (FirstKeyValueMatchingQualifiersFilter)o; return this.qualifiers.equals(other.qualifiers); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FirstKeyValueMatchingQualifiersFilter))) { + return false; + } + FirstKeyValueMatchingQualifiersFilter f = (FirstKeyValueMatchingQualifiersFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.qualifiers); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 714c550ddd..e5f15f0113 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -1,19 +1,16 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *
<p>
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
<p>
+ * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. */ package org.apache.hadoop.hbase.filter; @@ -21,23 +18,23 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; +import java.util.Objects; import java.util.PriorityQueue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; -import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAvailChecker; - import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; +import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; +import org.apache.yetus.audience.InterfaceAudience; /** * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key. @@ -80,8 +77,8 @@ public class FuzzyRowFilter extends FilterBase { for (Pair<byte[], byte[]> aFuzzyKeysData : fuzzyKeysData) { if (aFuzzyKeysData.getFirst().length != aFuzzyKeysData.getSecond().length) { - Pair<String, String> readable = - new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), Bytes.toStringBinary(aFuzzyKeysData.getSecond())); + Pair<String, String> readable = new Pair<>(Bytes.toStringBinary(aFuzzyKeysData.getFirst()), + Bytes.toStringBinary(aFuzzyKeysData.getSecond())); throw new IllegalArgumentException("Fuzzy pair lengths do not match: " + readable); } @@ -100,7 +97,6 @@ public class FuzzyRowFilter extends FilterBase { this.tracker = new RowTracker(); } - private void preprocessSearchKey(Pair<byte[], byte[]> p) { if (!UNSAFE_UNALIGNED) { // do nothing @@ -127,7 +123,9 @@ public class FuzzyRowFilter extends FilterBase { // do nothing return mask; } - if (isPreprocessedMask(mask)) return mask; + if (isPreprocessedMask(mask)) { + return mask; + } for (int i = 0; i < mask.length; i++) { if (mask[i] == 0) { mask[i] = -1; // 0 -> -1 @@ -164,9 +162,8 @@ public class FuzzyRowFilter extends FilterBase { for (int j = 0; j < fuzzyData.getSecond().length; j++) { fuzzyData.getSecond()[j] >>= 2; } - SatisfiesCode satisfiesCode = - satisfies(isReversed(), c.getRowArray(), c.getRowOffset(), c.getRowLength(), - fuzzyData.getFirst(), fuzzyData.getSecond()); + SatisfiesCode satisfiesCode = satisfies(isReversed(), c.getRowArray(), c.getRowOffset(), + c.getRowLength(), fuzzyData.getFirst(), fuzzyData.getSecond()); if (satisfiesCode == SatisfiesCode.YES) { lastFoundIndex = index; return ReturnCode.INCLUDE; @@ -204,14 +201,14 @@ public class FuzzyRowFilter extends FilterBase { RowTracker() { nextRows = new PriorityQueue<>(fuzzyKeysData.size(), - new Comparator<Pair<byte[], Pair<byte[], byte[]>>>() { - @Override - public int compare(Pair<byte[], Pair<byte[], byte[]>> o1, - Pair<byte[], Pair<byte[], byte[]>> o2) { - return isReversed()? 
Bytes.compareTo(o2.getFirst(), o1.getFirst()): - Bytes.compareTo(o1.getFirst(), o2.getFirst()); - } - }); + new Comparator<Pair<byte[], Pair<byte[], byte[]>>>() { + @Override + public int compare(Pair<byte[], Pair<byte[], byte[]>> o1, + Pair<byte[], Pair<byte[], byte[]>> o2) { + return isReversed() ? Bytes.compareTo(o2.getFirst(), o1.getFirst()) + : Bytes.compareTo(o1.getFirst(), o2.getFirst()); + } + }); } byte[] nextRow() { @@ -240,7 +237,8 @@ public class FuzzyRowFilter extends FilterBase { } boolean lessThan(Cell currentCell, byte[] nextRowKey) { - int compareResult = CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length); + int compareResult = + CellComparator.getInstance().compareRows(currentCell, nextRowKey, 0, nextRowKey.length); return (!isReversed() && compareResult < 0) || (isReversed() && compareResult > 0); } @@ -315,11 +313,17 @@ public class FuzzyRowFilter extends FilterBase { // Utility methods static enum SatisfiesCode { - /** row satisfies fuzzy rule */ + /** + * row satisfies fuzzy rule + */ YES, - /** row doesn't satisfy fuzzy rule, but there's possible greater row that does */ + /** + * row doesn't satisfy fuzzy rule, but there's possible greater row that does + */ NEXT_EXISTS, - /** row doesn't satisfy fuzzy rule and there's no greater row that does */ + /** + * row doesn't satisfy fuzzy rule and there's no greater row that does + */ NO_NEXT } @@ -455,7 +459,9 @@ public class FuzzyRowFilter extends FilterBase { return getNextForFuzzyRule(reverse, row, 0, row.length, fuzzyKeyBytes, fuzzyKeyMeta); } - /** Abstracts directional comparisons based on scan direction. */ + /** + * Abstracts directional comparisons based on scan direction. + */ private enum Order { ASC { @Override @@ -516,19 +522,29 @@ public class FuzzyRowFilter extends FilterBase { return reverse ? DESC : ASC; } - /** Returns true when {@code lhs < rhs}. */ + /** + * Returns true when {@code lhs < rhs}. + */ public abstract boolean lt(int lhs, int rhs); - /** Returns true when {@code lhs > rhs}. */ + /** + * Returns true when {@code lhs > rhs}. + */ public abstract boolean gt(int lhs, int rhs); - /** Returns {@code val} incremented by 1. */ + /** + * Returns {@code val} incremented by 1. + */ public abstract byte inc(byte val); - /** Return true when {@code val} is the maximum value */ + /** + * Return true when {@code val} is the maximum value + */ public abstract boolean isMax(byte val); - /** Return the minimum value according to this ordering scheme. */ + /** + * Return the minimum value according to this ordering scheme. + */ public abstract byte min(); } @@ -598,29 +614,29 @@ public class FuzzyRowFilter extends FilterBase { } } - return reverse? result: trimTrailingZeroes(result, fuzzyKeyMeta, toInc); + return reverse ? result : trimTrailingZeroes(result, fuzzyKeyMeta, toInc); } /** - * For forward scanner, next cell hint should not contain any trailing zeroes - * unless they are part of fuzzyKeyMeta - * hint = '\x01\x01\x01\x00\x00' - * will skip valid row '\x01\x01\x01' - * + * For forward scanner, next cell hint should not contain any trailing zeroes unless they are part + * of fuzzyKeyMeta hint = '\x01\x01\x01\x00\x00' will skip valid row '\x01\x01\x01' * @param result * @param fuzzyKeyMeta * @param toInc - position of incremented byte * @return trimmed version of result */ - + private static byte[] trimTrailingZeroes(byte[] result, byte[] fuzzyKeyMeta, int toInc) { - int off = fuzzyKeyMeta.length >= result.length? 
result.length -1: - fuzzyKeyMeta.length -1; - for( ; off >= 0; off--){ - if(fuzzyKeyMeta[off] != 0) break; + int off = fuzzyKeyMeta.length >= result.length ? result.length - 1 : fuzzyKeyMeta.length - 1; + for (; off >= 0; off--) { + if (fuzzyKeyMeta[off] != 0) { + break; + } + } + if (off < toInc) { + off = toInc; } - if (off < toInc) off = toInc; - byte[] retValue = new byte[off+1]; + byte[] retValue = new byte[off + 1]; System.arraycopy(result, 0, retValue, 0, retValue.length); return retValue; } @@ -631,19 +647,39 @@ public class FuzzyRowFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof FuzzyRowFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof FuzzyRowFilter)) { + return false; + } FuzzyRowFilter other = (FuzzyRowFilter) o; - if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) return false; + if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) { + return false; + } for (int i = 0; i < fuzzyKeysData.size(); ++i) { Pair<byte[], byte[]> thisData = this.fuzzyKeysData.get(i); Pair<byte[], byte[]> otherData = other.fuzzyKeysData.get(i); - if (!(Bytes.equals(thisData.getFirst(), otherData.getFirst()) && Bytes.equals( - thisData.getSecond(), otherData.getSecond()))) { + if (!(Bytes.equals(thisData.getFirst(), otherData.getFirst()) + && Bytes.equals(thisData.getSecond(), otherData.getSecond()))) { return false; } } return true; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof FuzzyRowFilter))) { + return false; + } + FuzzyRowFilter f = (FuzzyRowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.fuzzyKeysData.toArray()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 37564994de..0e1749a3ea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -131,4 +132,18 @@ public class InclusiveStopFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.stopRowKey); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof InclusiveStopFilter))) { + return false; + } + InclusiveStopFilter f = (InclusiveStopFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.stopRowKey)); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index a66441bceb..2689d5ba8c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -1,20 +1,12 @@ /* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; @@ -23,33 +15,39 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; +import java.util.Objects; import java.util.Optional; + import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import org.apache.yetus.audience.InterfaceAudience; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; -import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +import org.apache.yetus.audience.InterfaceAudience; /** - * A filter that will only return the key component of each KV (the value will - * be rewritten as empty). + * A filter that will only return the key component of each KV (the value will be rewritten as + * empty). *
<p>
- * This filter can be used to grab all of the keys without having to also grab - * the values. + * This filter can be used to grab all of the keys without having to also grab the values. */ @InterfaceAudience.Public public class KeyOnlyFilter extends FilterBase { boolean lenAsVal; - public KeyOnlyFilter() { this(false); } - public KeyOnlyFilter(boolean lenAsVal) { this.lenAsVal = lenAsVal; } + + public KeyOnlyFilter() { + this(false); + } + + public KeyOnlyFilter(boolean lenAsVal) { + this.lenAsVal = lenAsVal; + } @Override public boolean filterRowKey(Cell cell) throws IOException { @@ -57,6 +55,10 @@ public class KeyOnlyFilter extends FilterBase { return false; } + public boolean isLenAsVal() { + return this.lenAsVal; + } + @Override public Cell transformCell(Cell cell) { return createKeyOnlyCell(cell); @@ -81,9 +83,9 @@ public class KeyOnlyFilter extends FilterBase { return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { + public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) { Preconditions.checkArgument((filterArguments.isEmpty() || filterArguments.size() == 1), - "Expected: 0 or 1 but got: %s", filterArguments.size()); + "Expected: 0 or 1 but got: %s", filterArguments.size()); KeyOnlyFilter filter = new KeyOnlyFilter(); if (filterArguments.size() == 1) { filter.lenAsVal = ParseFilter.convertByteArrayToBoolean(filterArguments.get(0)); @@ -95,9 +97,8 @@ public class KeyOnlyFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.KeyOnlyFilter.Builder builder = - FilterProtos.KeyOnlyFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.KeyOnlyFilter.Builder builder = FilterProtos.KeyOnlyFilter.newBuilder(); builder.setLenAsVal(this.lenAsVal); return builder.build().toByteArray(); } @@ -108,8 +109,7 @@ public class KeyOnlyFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static KeyOnlyFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static KeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.KeyOnlyFilter proto; try { proto = FilterProtos.KeyOnlyFilter.parseFrom(pbBytes); @@ -121,15 +121,19 @@ public class KeyOnlyFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. 
*/ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof KeyOnlyFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof KeyOnlyFilter)) { + return false; + } - KeyOnlyFilter other = (KeyOnlyFilter)o; + KeyOnlyFilter other = (KeyOnlyFilter) o; return this.lenAsVal == other.lenAsVal; } @@ -202,7 +206,6 @@ public class KeyOnlyFilter extends FilterBase { return cell.getType(); } - @Override public long getSequenceId() { return 0; @@ -247,9 +250,23 @@ public class KeyOnlyFilter extends FilterBase { } } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof KeyOnlyFilter))) { + return false; + } + KeyOnlyFilter f = (KeyOnlyFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.lenAsVal); + } + static class KeyOnlyByteBufferExtendedCell extends ByteBufferExtendedCell { - public static final int FIXED_OVERHEAD = ClassSize.OBJECT + ClassSize.REFERENCE - + Bytes.SIZEOF_BOOLEAN; + public static final int FIXED_OVERHEAD = + ClassSize.OBJECT + ClassSize.REFERENCE + Bytes.SIZEOF_BOOLEAN; private ByteBufferExtendedCell cell; private boolean lenAsVal; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index dfd6297771..f54183b16b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -512,5 +513,41 @@ public class MultiRowRangeFilter extends FilterBase { || Bytes.compareTo(startRow, stopRow) < 0 || (Bytes.compareTo(startRow, stopRow) == 0 && stopRowInclusive == true); } + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof RowRange))) { + return false; + } + if (this == obj) { + return true; + } + RowRange rr = (RowRange) obj; + return Bytes.equals(this.stopRow, rr.getStopRow()) + && Bytes.equals(this.startRow, rr.getStartRow()) + && this.startRowInclusive == rr.isStartRowInclusive() + && this.stopRowInclusive == rr.isStopRowInclusive(); + } + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.stopRow), + Bytes.hashCode(this.startRow), + this.startRowInclusive, + this.stopRowInclusive); + } + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof MultiRowRangeFilter))) { + return false; + } + MultiRowRangeFilter f = (MultiRowRangeFilter) obj; + return this.areSerializedFieldsEqual(f); } + + @Override + public int hashCode() { + return Objects.hash(this.rangeList.toArray()); + } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 47feea7f36..29b5a7e05d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -1,62 +1,56 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; +import java.util.Objects; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; -import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; +import org.apache.yetus.audience.InterfaceAudience; /** - * This filter is used for selecting only those keys with columns that matches - * a particular prefix. For example, if prefix is 'an', it will pass keys will - * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'. + * This filter is used for selecting only those keys with columns that match a particular prefix. + * For example, if prefix is 'an', it will pass keys with columns like 'and', 'anti' but not keys + * with columns like 'ball', 'act'. 
*/ @InterfaceAudience.Public public class MultipleColumnPrefixFilter extends FilterBase { - protected byte [] hint = null; - protected TreeSet sortedPrefixes = createTreeSet(); + protected byte[] hint = null; + protected TreeSet sortedPrefixes = createTreeSet(); private final static int MAX_LOG_PREFIXES = 5; - public MultipleColumnPrefixFilter(final byte [][] prefixes) { + public MultipleColumnPrefixFilter(final byte[][] prefixes) { if (prefixes != null) { for (int i = 0; i < prefixes.length; i++) { if (!sortedPrefixes.add(prefixes[i])) - throw new IllegalArgumentException ("prefixes must be distinct"); + throw new IllegalArgumentException("prefixes must be distinct"); } } } - public byte [][] getPrefix() { + public byte[][] getPrefix() { int count = 0; - byte [][] temp = new byte [sortedPrefixes.size()][]; - for (byte [] prefixes : sortedPrefixes) { - temp [count++] = prefixes; + byte[][] temp = new byte[sortedPrefixes.size()][]; + for (byte[] prefixes : sortedPrefixes) { + temp[count++] = prefixes; } return temp; } @@ -83,17 +77,17 @@ public class MultipleColumnPrefixFilter extends FilterBase { } public ReturnCode filterColumn(Cell cell) { - byte [] qualifier = CellUtil.cloneQualifier(cell); - TreeSet lesserOrEqualPrefixes = - (TreeSet) sortedPrefixes.headSet(qualifier, true); + byte[] qualifier = CellUtil.cloneQualifier(cell); + TreeSet lesserOrEqualPrefixes = + (TreeSet) sortedPrefixes.headSet(qualifier, true); if (lesserOrEqualPrefixes.size() != 0) { - byte [] largestPrefixSmallerThanQualifier = lesserOrEqualPrefixes.last(); - + byte[] largestPrefixSmallerThanQualifier = lesserOrEqualPrefixes.last(); + if (Bytes.startsWith(qualifier, largestPrefixSmallerThanQualifier)) { return ReturnCode.INCLUDE; } - + if (lesserOrEqualPrefixes.size() == sortedPrefixes.size()) { return ReturnCode.NEXT_ROW; } else { @@ -106,10 +100,10 @@ public class MultipleColumnPrefixFilter extends FilterBase { } } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - byte [][] prefixes = new byte [filterArguments.size()][]; - for (int i = 0 ; i < filterArguments.size(); i++) { - byte [] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(i)); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + byte[][] prefixes = new byte[filterArguments.size()][]; + for (int i = 0; i < filterArguments.size(); i++) { + byte[] columnPrefix = ParseFilter.removeQuotesFromByteArray(filterArguments.get(i)); prefixes[i] = columnPrefix; } return new MultipleColumnPrefixFilter(prefixes); @@ -119,11 +113,13 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { + public byte[] toByteArray() { FilterProtos.MultipleColumnPrefixFilter.Builder builder = - FilterProtos.MultipleColumnPrefixFilter.newBuilder(); - for (byte [] element : sortedPrefixes) { - if (element != null) builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element)); + FilterProtos.MultipleColumnPrefixFilter.newBuilder(); + for (byte[] element : sortedPrefixes) { + if (element != null) { + builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element)); + } } return builder.build().toByteArray(); } @@ -134,8 +130,8 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static MultipleColumnPrefixFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static MultipleColumnPrefixFilter 
parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.MultipleColumnPrefixFilter proto; try { proto = FilterProtos.MultipleColumnPrefixFilter.parseFrom(pbBytes); @@ -143,7 +139,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { throw new DeserializationException(e); } int numPrefixes = proto.getSortedPrefixesCount(); - byte [][] prefixes = new byte[numPrefixes][]; + byte[][] prefixes = new byte[numPrefixes][]; for (int i = 0; i < numPrefixes; ++i) { prefixes[i] = proto.getSortedPrefixes(i).toByteArray(); } @@ -153,15 +149,19 @@ public class MultipleColumnPrefixFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof MultipleColumnPrefixFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof MultipleColumnPrefixFilter)) { + return false; + } - MultipleColumnPrefixFilter other = (MultipleColumnPrefixFilter)o; + MultipleColumnPrefixFilter other = (MultipleColumnPrefixFilter) o; return this.sortedPrefixes.equals(other.sortedPrefixes); } @@ -170,18 +170,19 @@ public class MultipleColumnPrefixFilter extends FilterBase { return PrivateCellUtil.createFirstOnRowCol(cell, hint, 0, hint.length); } - public TreeSet createTreeSet() { + public TreeSet createTreeSet() { return new TreeSet<>(new Comparator() { - @Override - public int compare (Object o1, Object o2) { - if (o1 == null || o2 == null) - throw new IllegalArgumentException ("prefixes can't be null"); - - byte [] b1 = (byte []) o1; - byte [] b2 = (byte []) o2; - return Bytes.compareTo (b1, 0, b1.length, b2, 0, b2.length); + @Override + public int compare(Object o1, Object o2) { + if (o1 == null || o2 == null) { + throw new IllegalArgumentException("prefixes can't be null"); } - }); + + byte[] b1 = (byte[]) o1; + byte[] b2 = (byte[]) o2; + return Bytes.compareTo(b1, 0, b1.length, b2, 0, b2.length); + } + }); } @Override @@ -204,7 +205,22 @@ public class MultipleColumnPrefixFilter extends FilterBase { } } - return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(), - count, this.sortedPrefixes.size(), prefixes.toString()); + return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(), count, + this.sortedPrefixes.size(), prefixes.toString()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof MultipleColumnPrefixFilter))) { + return false; + } + MultipleColumnPrefixFilter f = (MultipleColumnPrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.sortedPrefixes); } + } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index 30ddf24a78..06f777d629 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import 
org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -150,4 +151,18 @@ public class PageFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.pageSize; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof PageFilter))) { + return false; + } + PageFilter f = (PageFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.pageSize); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index df9177bf8a..e243933a5b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; @@ -156,4 +157,18 @@ public class PrefixFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof PrefixFilter))) { + return false; + } + PrefixFilter f = (PrefixFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getPrefix())); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index b38c0108df..0125229935 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -1,48 +1,42 @@ /** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** - * This filter is used to filter based on the column qualifier. It takes an - * operator (equal, greater, not equal, etc) and a byte [] comparator for the - * column qualifier portion of a key. + * This filter is used to filter based on the column qualifier. It takes an operator (equal, + * greater, not equal, etc) and a byte [] comparator for the column qualifier portion of a key. *

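// --- Editor's sketch (illustrative only, not part of this patch): the operator-plus-comparator
// pairing the Javadoc above describes, keeping only cells whose qualifier is exactly "q1".
// The qualifier name is made up, and the standard HBase client imports (CompareOperator,
// BinaryComparator, Bytes) are assumed.
Filter onlyQ1 = new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("q1")));
// --- end sketch ---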
- * This filter can be wrapped with {@link WhileMatchFilter} and {@link SkipFilter} - * to add more control. + * This filter can be wrapped with {@link WhileMatchFilter} and {@link SkipFilter} to add more + * control. *

* Multiple filters can be combined using {@link FilterList}. *

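// --- Editor's sketch (illustrative only, not part of this patch): combining two QualifierFilters
// through FilterList, as the sentence above suggests. This keeps cells whose qualifier falls
// lexicographically between "q1" and "q5" inclusive; the names are made up and the standard
// HBase client imports are assumed.
FilterList range = new FilterList(FilterList.Operator.MUST_PASS_ALL,
    new QualifierFilter(CompareOperator.GREATER_OR_EQUAL, new BinaryComparator(Bytes.toBytes("q1"))),
    new QualifierFilter(CompareOperator.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("q5"))));
Scan scan = new Scan();
scan.setFilter(range);
// --- end sketch ---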
- * If an already known column qualifier is looked for, - * use {@link org.apache.hadoop.hbase.client.Get#addColumn} - * directly rather than a filter. + * If an already known column qualifier is looked for, use + * {@link org.apache.hadoop.hbase.client.Get#addColumn} directly rather than a filter. */ @InterfaceAudience.Public public class QualifierFilter extends CompareFilter { @@ -51,12 +45,11 @@ public class QualifierFilter extends CompareFilter { * Constructor. * @param op the compare op for column qualifier matching * @param qualifierComparator the comparator for column qualifier matching - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #QualifierFilter(CompareOperator, ByteArrayComparable)} instead. + * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use + * {@link #QualifierFilter(CompareOperator, ByteArrayComparable)} instead. */ @Deprecated - public QualifierFilter(final CompareOp op, - final ByteArrayComparable qualifierComparator) { + public QualifierFilter(final CompareOp op, final ByteArrayComparable qualifierComparator) { super(op, qualifierComparator); } @@ -65,8 +58,7 @@ public class QualifierFilter extends CompareFilter { * @param op the compare op for column qualifier matching * @param qualifierComparator the comparator for column qualifier matching */ - public QualifierFilter(final CompareOperator op, - final ByteArrayComparable qualifierComparator) { + public QualifierFilter(final CompareOperator op, final ByteArrayComparable qualifierComparator) { super(op, qualifierComparator); } @@ -87,10 +79,10 @@ public class QualifierFilter extends CompareFilter { return ReturnCode.INCLUDE; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { ArrayList arguments = CompareFilter.extractArguments(filterArguments); - CompareOperator compareOp = (CompareOperator)arguments.get(0); - ByteArrayComparable comparator = (ByteArrayComparable)arguments.get(1); + CompareOperator compareOp = (CompareOperator) arguments.get(0); + ByteArrayComparable comparator = (ByteArrayComparable) arguments.get(1); return new QualifierFilter(compareOp, comparator); } @@ -98,9 +90,8 @@ public class QualifierFilter extends CompareFilter { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.QualifierFilter.Builder builder = - FilterProtos.QualifierFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.QualifierFilter.Builder builder = FilterProtos.QualifierFilter.newBuilder(); builder.setCompareFilter(super.convert()); return builder.build().toByteArray(); } @@ -111,8 +102,7 @@ public class QualifierFilter extends CompareFilter { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static QualifierFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static QualifierFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.QualifierFilter proto; try { proto = FilterProtos.QualifierFilter.parseFrom(pbBytes); @@ -120,7 +110,7 @@ public class QualifierFilter extends CompareFilter { throw new DeserializationException(e); } final CompareOperator valueCompareOp = - CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name()); + CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name()); ByteArrayComparable valueComparator = null; try { if (proto.getCompareFilter().hasComparator()) { @@ -129,18 
+119,36 @@ public class QualifierFilter extends CompareFilter { } catch (IOException ioe) { throw new DeserializationException(ioe); } - return new QualifierFilter(valueCompareOp,valueComparator); + return new QualifierFilter(valueCompareOp, valueComparator); } /** - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof QualifierFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof QualifierFilter)) { + return false; + } return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof QualifierFilter))) { + return false; + } + QualifierFilter f = (QualifierFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index 58624d27ff..369ddd7df4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.util.Objects; import java.util.Random; import org.apache.hadoop.hbase.Cell; @@ -117,9 +118,8 @@ public class RandomRowFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { - FilterProtos.RandomRowFilter.Builder builder = - FilterProtos.RandomRowFilter.newBuilder(); + public byte[] toByteArray() { + FilterProtos.RandomRowFilter.Builder builder = FilterProtos.RandomRowFilter.newBuilder(); builder.setChance(this.chance); return builder.build().toByteArray(); } @@ -130,8 +130,7 @@ public class RandomRowFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static RandomRowFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static RandomRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.RandomRowFilter proto; try { proto = FilterProtos.RandomRowFilter.parseFrom(pbBytes); @@ -148,10 +147,28 @@ public class RandomRowFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof RandomRowFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof RandomRowFilter)) { + return false; + } - RandomRowFilter other = (RandomRowFilter)o; + RandomRowFilter other = (RandomRowFilter) o; return this.getChance() == other.getChance(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof RandomRowFilter))) { + return false; + } + RandomRowFilter f = (RandomRowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getChance()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 8eba3ba2fb..435caab38b 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -160,4 +160,18 @@ public class RowFilter extends CompareFilter { return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof RowFilter))) { + return false; + } + RowFilter f = (RowFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 566c408d01..4444a19381 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -1,126 +1,111 @@ /** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** - * A {@link Filter} that checks a single column value, but does not emit the - * tested column. This will enable a performance boost over - * {@link SingleColumnValueFilter}, if the tested column value is not actually - * needed as input (besides for the filtering itself). + * A {@link Filter} that checks a single column value, but does not emit the tested column. This + * will enable a performance boost over {@link SingleColumnValueFilter}, if the tested column value + * is not actually needed as input (besides for the filtering itself). */ @InterfaceAudience.Public public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the column is not found or - * the condition fails, the row will not be emitted. - * + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted; except for the tested column value. + * If the column is not found or the condition fails, the row will not be emitted. * @param family name of column family * @param qualifier name of column qualifier * @param compareOp operator * @param value value to compare column values against - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, byte[])} + * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, byte[])} */ @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOp compareOp, byte[] value) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOp compareOp, + byte[] value) { super(family, qualifier, compareOp, value); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the column is not found or - * the condition fails, the row will not be emitted. - * + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted; except for the tested column value. + * If the column is not found or the condition fails, the row will not be emitted. 
* @param family name of column family * @param qualifier name of column qualifier * @param op operator * @param value value to compare column values against */ - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOperator op, byte[] value) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOperator op, + byte[] value) { super(family, qualifier, op, value); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the condition fails, the - * row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted; except for the tested column value. + * If the condition fails, the row will not be emitted. *

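// --- Editor's sketch (illustrative only, not part of this patch): the behaviour the constructor
// Javadoc above describes. Rows whose cf:status cell equals "active" are emitted, but the tested
// cf:status cell itself is excluded from the result. Family, qualifier, and value are made-up
// names; the standard HBase client imports are assumed.
Scan scan = new Scan();
scan.setFilter(new SingleColumnValueExcludeFilter(Bytes.toBytes("cf"), Bytes.toBytes("status"),
    CompareOperator.EQUAL, Bytes.toBytes("active")));
// --- end sketch ---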
- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param compareOp operator * @param comparator Comparator to use. * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} + * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} */ @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOp compareOp, ByteArrayComparable comparator) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOp compareOp, + ByteArrayComparable comparator) { super(family, qualifier, compareOp, comparator); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the condition fails, the - * row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted; except for the tested column value. + * If the condition fails, the row will not be emitted. *

- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param op operator * @param comparator Comparator to use. */ - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOperator op, ByteArrayComparable comparator) { + public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, CompareOperator op, + ByteArrayComparable comparator) { super(family, qualifier, op, comparator); } - /** * Constructor for protobuf deserialization only. * @param family @@ -130,14 +115,14 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @param filterIfMissing * @param latestVersionOnly * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable, boolean, boolean)} + * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable, boolean, boolean)} */ @Deprecated protected SingleColumnValueExcludeFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, ByteArrayComparable comparator, final boolean filterIfMissing, final boolean latestVersionOnly) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, - filterIfMissing, latestVersionOnly); + this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, filterIfMissing, + latestVersionOnly); } /** @@ -158,7 +143,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { // We cleaned result row in FilterRow to be consistent with scanning process. 
@Override public boolean hasFilterRow() { - return true; + return true; } // Here we remove from row all key values from testing column @@ -174,12 +159,12 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } } - public static Filter createFilterFromArguments(ArrayList filterArguments) { - SingleColumnValueFilter tempFilter = (SingleColumnValueFilter) - SingleColumnValueFilter.createFilterFromArguments(filterArguments); - SingleColumnValueExcludeFilter filter = new SingleColumnValueExcludeFilter ( - tempFilter.getFamily(), tempFilter.getQualifier(), - tempFilter.getOperator(), tempFilter.getComparator()); + public static Filter createFilterFromArguments(ArrayList filterArguments) { + SingleColumnValueFilter tempFilter = (SingleColumnValueFilter) SingleColumnValueFilter + .createFilterFromArguments(filterArguments); + SingleColumnValueExcludeFilter filter = + new SingleColumnValueExcludeFilter(tempFilter.getFamily(), tempFilter.getQualifier(), + tempFilter.getOperator(), tempFilter.getComparator()); if (filterArguments.size() == 6) { filter.setFilterIfMissing(tempFilter.getFilterIfMissing()); @@ -192,9 +177,9 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { + public byte[] toByteArray() { FilterProtos.SingleColumnValueExcludeFilter.Builder builder = - FilterProtos.SingleColumnValueExcludeFilter.newBuilder(); + FilterProtos.SingleColumnValueExcludeFilter.newBuilder(); builder.setSingleColumnValueFilter(super.convert()); return builder.build().toByteArray(); } @@ -205,8 +190,8 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @throws DeserializationException * @see #toByteArray */ - public static SingleColumnValueExcludeFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SingleColumnValueExcludeFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.SingleColumnValueExcludeFilter proto; try { proto = FilterProtos.SingleColumnValueExcludeFilter.parseFrom(pbBytes); @@ -215,8 +200,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } FilterProtos.SingleColumnValueFilter parentProto = proto.getSingleColumnValueFilter(); - final CompareOperator compareOp = - CompareOperator.valueOf(parentProto.getCompareOp().name()); + final CompareOperator compareOp = CompareOperator.valueOf(parentProto.getCompareOp().name()); final ByteArrayComparable comparator; try { comparator = ProtobufUtil.toComparator(parentProto.getComparator()); @@ -224,21 +208,46 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { throw new DeserializationException(ioe); } - return new SingleColumnValueExcludeFilter(parentProto.hasColumnFamily() ? parentProto - .getColumnFamily().toByteArray() : null, parentProto.hasColumnQualifier() ? parentProto - .getColumnQualifier().toByteArray() : null, compareOp, comparator, parentProto - .getFilterIfMissing(), parentProto.getLatestVersionOnly()); + return new SingleColumnValueExcludeFilter( + parentProto.hasColumnFamily() ? parentProto.getColumnFamily().toByteArray() : null, + parentProto.hasColumnQualifier() ? 
parentProto.getColumnQualifier().toByteArray() : null, + compareOp, comparator, parentProto.getFilterIfMissing(), + parentProto.getLatestVersionOnly()); } /** - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SingleColumnValueExcludeFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof SingleColumnValueExcludeFilter)) { + return false; + } return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(SingleColumnValueExcludeFilter.class.isInstance(obj)))) { + return false; + } + SingleColumnValueExcludeFilter other = (SingleColumnValueExcludeFilter) obj; + return Bytes.equals(this.getFamily(), other.getFamily()) + && Bytes.equals(this.getQualifier(), other.getQualifier()) && this.op.equals(other.op) + && this.getComparator().areSerializedFieldsEqual(other.getComparator()) + && this.getFilterIfMissing() == other.getFilterIfMissing() + && this.getLatestVersionOnly() == other.getLatestVersionOnly(); + } + + @Override + public int hashCode() { + return Objects.hash(this.getClass().getName(), Bytes.hashCode(this.getFamily()), + Bytes.hashCode(this.getQualifier()), this.op, this.getComparator(), this.getFilterIfMissing(), + this.getLatestVersionOnly()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index e5c83b1d72..cef5f009e0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -1,77 +1,69 @@ /** - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. */ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; -import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.util.Bytes; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; +import org.apache.yetus.audience.InterfaceAudience; /** * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} - * operator (equal, greater, not equal, etc), and either a byte [] value or - * a ByteArrayComparable. + * operator (equal, greater, not equal, etc), and either a byte [] value or a ByteArrayComparable. *

- * If we have a byte [] value then we just do a lexicographic compare. For - * example, if passed value is 'b' and cell has 'a' and the compare operator - * is LESS, then we will filter out this cell (return true). If this is not - * sufficient (eg you want to deserialize a long and then compare it to a fixed - * long value), then you can pass in your own comparator instead. + * If we have a byte [] value then we just do a lexicographic compare. For example, if passed value + * is 'b' and cell has 'a' and the compare operator is LESS, then we will filter out this cell + * (return true). If this is not sufficient (eg you want to deserialize a long and then compare it + * to a fixed long value), then you can pass in your own comparator instead. *

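// --- Editor's sketch (illustrative only, not part of this patch): the "own comparator" case the
// paragraph above mentions. LongComparator deserializes the stored 8-byte value and compares it
// numerically as a long rather than lexicographically. "cf" and "count" are made-up names.
SingleColumnValueFilter overTen = new SingleColumnValueFilter(Bytes.toBytes("cf"),
    Bytes.toBytes("count"), CompareOperator.GREATER, new LongComparator(10L));
// --- end sketch ---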
- * You must also specify a family and qualifier. Only the value of this column - * will be tested. When using this filter on a - * {@link org.apache.hadoop.hbase.CellScanner} with specified - * inputs, the column to be tested should also be added as input (otherwise - * the filter will regard the column as missing). + * You must also specify a family and qualifier. Only the value of this column will be tested. When + * using this filter on a {@link org.apache.hadoop.hbase.CellScanner} with specified inputs, the + * column to be tested should also be added as input (otherwise the filter will regard the column as + * missing). *

- * To prevent the entire row from being emitted if the column is not found - * on a row, use {@link #setFilterIfMissing}. - * Otherwise, if the column is found, the entire row will be emitted only if - * the value passes. If the value fails, the row will be filtered out. + * To prevent the entire row from being emitted if the column is not found on a row, use + * {@link #setFilterIfMissing}. Otherwise, if the column is found, the entire row will be emitted + * only if the value passes. If the value fails, the row will be filtered out. *

- * In order to test values of previous versions (timestamps), set - * {@link #setLatestVersionOnly} to false. The default is true, meaning that - * only the latest version's value is tested and all previous versions are ignored. + * In order to test values of previous versions (timestamps), set {@link #setLatestVersionOnly} to + * false. The default is true, meaning that only the latest version's value is tested and all + * previous versions are ignored. *

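// --- Editor's sketch (illustrative only, not part of this patch): the two flags discussed above.
// setFilterIfMissing(true) drops rows that lack the tested column entirely (by default such rows
// pass through), and setLatestVersionOnly(false) lets any stored version of the column match, not
// just the newest. "cf" and "state" are made-up names.
SingleColumnValueFilter scvf = new SingleColumnValueFilter(Bytes.toBytes("cf"),
    Bytes.toBytes("state"), CompareOperator.EQUAL, Bytes.toBytes("open"));
scvf.setFilterIfMissing(true);
scvf.setLatestVersionOnly(false);
// --- end sketch ---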
* To filter based on the value of all scanned columns, use {@link ValueFilter}. */ @InterfaceAudience.Public public class SingleColumnValueFilter extends FilterBase { - protected byte [] columnFamily; - protected byte [] columnQualifier; + protected byte[] columnFamily; + protected byte[] columnQualifier; protected CompareOperator op; protected org.apache.hadoop.hbase.filter.ByteArrayComparable comparator; protected boolean foundColumn = false; @@ -80,86 +72,78 @@ public class SingleColumnValueFilter extends FilterBase { protected boolean latestVersionOnly = true; /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted. If the condition fails, the row will + * not be emitted. *

- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param compareOp operator * @param value value to compare column values against * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, byte[])} instead. + * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, byte[])} instead. */ @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, final byte[] value) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), - new org.apache.hadoop.hbase.filter.BinaryComparator(value)); + new org.apache.hadoop.hbase.filter.BinaryComparator(value)); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted. If the condition fails, the row will + * not be emitted. *

- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param op operator * @param value value to compare column values against */ - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, - final CompareOperator op, final byte[] value) { - this(family, qualifier, op, - new org.apache.hadoop.hbase.filter.BinaryComparator(value)); + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, + final CompareOperator op, final byte[] value) { + this(family, qualifier, op, new org.apache.hadoop.hbase.filter.BinaryComparator(value)); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted. If the condition fails, the row will + * not be emitted. *

- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param compareOp operator * @param comparator Comparator to use. * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} instead. + * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} + * instead. */ @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator); } /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. + * Constructor for binary compare of the value of a single column. If the column is found and the + * condition passes, all columns of the row will be emitted. If the condition fails, the row will + * not be emitted. *

- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * + * Use the filterIfColumnMissing flag to set whether the rest of the columns in a row will be + * emitted if the specified column to check is not found in the row. * @param family name of column family * @param qualifier name of column qualifier * @param op operator * @param comparator Comparator to use. */ - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, + public SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOperator op, final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator) { this.columnFamily = family; @@ -177,16 +161,15 @@ public class SingleColumnValueFilter extends FilterBase { * @param filterIfMissing * @param latestVersionOnly * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable, - * boolean, boolean)} instead. + * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable, boolean, boolean)} + * instead. */ @Deprecated protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOp compareOp, org.apache.hadoop.hbase.filter.ByteArrayComparable comparator, - final boolean filterIfMissing, - final boolean latestVersionOnly) { + final boolean filterIfMissing, final boolean latestVersionOnly) { this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, filterIfMissing, - latestVersionOnly); + latestVersionOnly); } /** @@ -200,7 +183,7 @@ public class SingleColumnValueFilter extends FilterBase { */ protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, final CompareOperator op, org.apache.hadoop.hbase.filter.ByteArrayComparable comparator, - final boolean filterIfMissing, final boolean latestVersionOnly) { + final boolean filterIfMissing, final boolean latestVersionOnly) { this(family, qualifier, op, comparator); this.filterIfMissing = filterIfMissing; this.latestVersionOnly = latestVersionOnly; @@ -208,7 +191,7 @@ public class SingleColumnValueFilter extends FilterBase { /** * @return operator - * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. + * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. */ @Deprecated public CompareOp getOperator() { @@ -254,7 +237,8 @@ public class SingleColumnValueFilter extends FilterBase { @Override public ReturnCode filterCell(final Cell c) { - // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue())); + // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + + // Bytes.toString(keyValue.getValue())); if (this.matchedColumn) { // We already found and matched the single column, all keys now pass return ReturnCode.INCLUDE; @@ -267,7 +251,7 @@ public class SingleColumnValueFilter extends FilterBase { } foundColumn = true; if (filterColumnValue(c)) { - return this.latestVersionOnly? ReturnCode.NEXT_ROW: ReturnCode.INCLUDE; + return this.latestVersionOnly ? 
ReturnCode.NEXT_ROW : ReturnCode.INCLUDE; } this.matchedColumn = true; return ReturnCode.INCLUDE; @@ -282,9 +266,9 @@ public class SingleColumnValueFilter extends FilterBase { public boolean filterRow() { // If column was found, return false if it was matched, true if it was not // If column not found, return true if we filter if missing, false if not - return this.foundColumn? !this.matchedColumn: this.filterIfMissing; + return this.foundColumn ? !this.matchedColumn : this.filterIfMissing; } - + @Override public boolean hasFilterRow() { return true; @@ -298,8 +282,8 @@ public class SingleColumnValueFilter extends FilterBase { /** * Get whether entire row should be filtered if column is not found. - * @return true if row should be skipped if column not found, false if row - * should be let through anyways + * @return true if row should be skipped if column not found, false if row should be let through + * anyways */ public boolean getFilterIfMissing() { return filterIfMissing; @@ -310,7 +294,7 @@ public class SingleColumnValueFilter extends FilterBase { *

* If true, the entire row will be skipped if the column is not found. *

- * If false, the row will pass if the column is not found. This is default. + * If false, the row will pass if the column is not found. This is default. * @param filterIfMissing flag */ public void setFilterIfMissing(boolean filterIfMissing) { @@ -318,10 +302,9 @@ public class SingleColumnValueFilter extends FilterBase { } /** - * Get whether only the latest version of the column value should be compared. - * If true, the row will be returned if only the latest version of the column - * value matches. If false, the row will be returned if any version of the - * column value matches. The default is true. + * Get whether only the latest version of the column value should be compared. If true, the row + * will be returned if only the latest version of the column value matches. If false, the row will + * be returned if any version of the column value matches. The default is true. * @return return value */ public boolean getLatestVersionOnly() { @@ -329,36 +312,32 @@ public class SingleColumnValueFilter extends FilterBase { } /** - * Set whether only the latest version of the column value should be compared. - * If true, the row will be returned if only the latest version of the column - * value matches. If false, the row will be returned if any version of the - * column value matches. The default is true. + * Set whether only the latest version of the column value should be compared. If true, the row + * will be returned if only the latest version of the column value matches. If false, the row will + * be returned if any version of the column value matches. The default is true. * @param latestVersionOnly flag */ public void setLatestVersionOnly(boolean latestVersionOnly) { this.latestVersionOnly = latestVersionOnly; } - public static Filter createFilterFromArguments(ArrayList filterArguments) { + public static Filter createFilterFromArguments(ArrayList filterArguments) { Preconditions.checkArgument(filterArguments.size() == 4 || filterArguments.size() == 6, - "Expected 4 or 6 but got: %s", filterArguments.size()); - byte [] family = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); - byte [] qualifier = ParseFilter.removeQuotesFromByteArray(filterArguments.get(1)); + "Expected 4 or 6 but got: %s", filterArguments.size()); + byte[] family = ParseFilter.removeQuotesFromByteArray(filterArguments.get(0)); + byte[] qualifier = ParseFilter.removeQuotesFromByteArray(filterArguments.get(1)); CompareOperator op = ParseFilter.createCompareOperator(filterArguments.get(2)); - org.apache.hadoop.hbase.filter.ByteArrayComparable comparator = ParseFilter.createComparator( - ParseFilter.removeQuotesFromByteArray(filterArguments.get(3))); - - if (comparator instanceof RegexStringComparator || - comparator instanceof SubstringComparator) { - if (op != CompareOperator.EQUAL && - op != CompareOperator.NOT_EQUAL) { - throw new IllegalArgumentException ("A regexstring comparator and substring comparator " + - "can only be used with EQUAL and NOT_EQUAL"); + org.apache.hadoop.hbase.filter.ByteArrayComparable comparator = + ParseFilter.createComparator(ParseFilter.removeQuotesFromByteArray(filterArguments.get(3))); + + if (comparator instanceof RegexStringComparator || comparator instanceof SubstringComparator) { + if (op != CompareOperator.EQUAL && op != CompareOperator.NOT_EQUAL) { + throw new IllegalArgumentException("A regexstring comparator and substring comparator " + + "can only be used with EQUAL and NOT_EQUAL"); } } - SingleColumnValueFilter filter = new SingleColumnValueFilter(family, qualifier, 
- op, comparator); + SingleColumnValueFilter filter = new SingleColumnValueFilter(family, qualifier, op, comparator); if (filterArguments.size() == 6) { boolean filterIfMissing = ParseFilter.convertByteArrayToBoolean(filterArguments.get(4)); @@ -371,7 +350,7 @@ public class SingleColumnValueFilter extends FilterBase { FilterProtos.SingleColumnValueFilter convert() { FilterProtos.SingleColumnValueFilter.Builder builder = - FilterProtos.SingleColumnValueFilter.newBuilder(); + FilterProtos.SingleColumnValueFilter.newBuilder(); if (this.columnFamily != null) { builder.setColumnFamily(UnsafeByteOperations.unsafeWrap(this.columnFamily)); } @@ -391,7 +370,7 @@ public class SingleColumnValueFilter extends FilterBase { * @return The filter serialized using pb */ @Override - public byte [] toByteArray() { + public byte[] toByteArray() { return convert().toByteArray(); } @@ -401,8 +380,8 @@ public class SingleColumnValueFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static SingleColumnValueFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SingleColumnValueFilter parseFrom(final byte[] pbBytes) + throws DeserializationException { FilterProtos.SingleColumnValueFilter proto; try { proto = FilterProtos.SingleColumnValueFilter.parseFrom(pbBytes); @@ -410,8 +389,7 @@ public class SingleColumnValueFilter extends FilterBase { throw new DeserializationException(e); } - final CompareOperator compareOp = - CompareOperator.valueOf(proto.getCompareOp().name()); + final CompareOperator compareOp = CompareOperator.valueOf(proto.getCompareOp().name()); final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator; try { comparator = ProtobufUtil.toComparator(proto.getComparator()); @@ -419,34 +397,37 @@ public class SingleColumnValueFilter extends FilterBase { throw new DeserializationException(ioe); } - return new SingleColumnValueFilter(proto.hasColumnFamily() ? proto.getColumnFamily() - .toByteArray() : null, proto.hasColumnQualifier() ? proto.getColumnQualifier() - .toByteArray() : null, compareOp, comparator, proto.getFilterIfMissing(), proto - .getLatestVersionOnly()); + return new SingleColumnValueFilter( + proto.hasColumnFamily() ? proto.getColumnFamily().toByteArray() : null, + proto.hasColumnQualifier() ? proto.getColumnQualifier().toByteArray() : null, compareOp, + comparator, proto.getFilterIfMissing(), proto.getLatestVersionOnly()); } /** - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. 
*/ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SingleColumnValueFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof SingleColumnValueFilter)) { + return false; + } - SingleColumnValueFilter other = (SingleColumnValueFilter)o; + SingleColumnValueFilter other = (SingleColumnValueFilter) o; return Bytes.equals(this.getFamily(), other.getFamily()) - && Bytes.equals(this.getQualifier(), other.getQualifier()) - && this.op.equals(other.op) - && this.getComparator().areSerializedFieldsEqual(other.getComparator()) - && this.getFilterIfMissing() == other.getFilterIfMissing() - && this.getLatestVersionOnly() == other.getLatestVersionOnly(); + && Bytes.equals(this.getQualifier(), other.getQualifier()) && this.op.equals(other.op) + && this.getComparator().areSerializedFieldsEqual(other.getComparator()) + && this.getFilterIfMissing() == other.getFilterIfMissing() + && this.getLatestVersionOnly() == other.getLatestVersionOnly(); } /** - * The only CF this filter needs is given column family. So, it's the only essential - * column in whole scan. If filterIfMissing == false, all families are essential, - * because of possibility of skipping the rows without any data in filtered CF. + * The only CF this filter needs is given column family. So, it's the only essential column in + * whole scan. If filterIfMissing == false, all families are essential, because of possibility of + * skipping the rows without any data in filtered CF. */ @Override public boolean isFamilyEssential(byte[] name) { @@ -455,9 +436,23 @@ public class SingleColumnValueFilter extends FilterBase { @Override public String toString() { - return String.format("%s (%s, %s, %s, %s)", - this.getClass().getSimpleName(), Bytes.toStringBinary(this.columnFamily), - Bytes.toStringBinary(this.columnQualifier), this.op.name(), - Bytes.toStringBinary(this.comparator.getValue())); + return String.format("%s (%s, %s, %s, %s)", this.getClass().getSimpleName(), + Bytes.toStringBinary(this.columnFamily), Bytes.toStringBinary(this.columnQualifier), + this.op.name(), Bytes.toStringBinary(this.comparator.getValue())); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(SingleColumnValueFilter.class.isInstance(obj)))) { + return false; + } + SingleColumnValueFilter f = (SingleColumnValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(Bytes.hashCode(this.getFamily()), Bytes.hashCode(this.getQualifier()), + this.op, this.getComparator(), this.getFilterIfMissing(), this.getLatestVersionOnly()); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index c710548bdb..125d1c06a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -100,7 +101,7 @@ public class SkipFilter extends FilterBase { public boolean filterRow() { return filterRow; } - + @Override public boolean hasFilterRow() { return true; @@ -111,8 +112,7 @@ public class SkipFilter extends FilterBase { */ @Override public byte[] toByteArray() throws IOException { - 
FilterProtos.SkipFilter.Builder builder = - FilterProtos.SkipFilter.newBuilder(); + FilterProtos.SkipFilter.Builder builder = FilterProtos.SkipFilter.newBuilder(); builder.setFilter(ProtobufUtil.toFilter(this.filter)); return builder.build().toByteArray(); } @@ -123,8 +123,7 @@ public class SkipFilter extends FilterBase { * @throws DeserializationException * @see #toByteArray */ - public static SkipFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static SkipFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.SkipFilter proto; try { proto = FilterProtos.SkipFilter.parseFrom(pbBytes); @@ -145,10 +144,14 @@ public class SkipFilter extends FilterBase { */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof SkipFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof SkipFilter)) { + return false; + } - SkipFilter other = (SkipFilter)o; + SkipFilter other = (SkipFilter) o; return getFilter().areSerializedFieldsEqual(other.getFilter()); } @@ -161,4 +164,18 @@ public class SkipFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.filter.toString(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + SkipFilter f = (SkipFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.filter); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index b3a8eae499..16bbc5676b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -1,45 +1,42 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. */ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; - import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** - * Filter that returns only cells whose timestamp (version) is - * in the specified list of timestamps (versions). + * Filter that returns only cells whose timestamp (version) is in the specified list of timestamps + * (versions). *

- * Note: Use of this filter overrides any time range/time stamp - * options specified using {@link org.apache.hadoop.hbase.client.Get#setTimeRange(long, long)}, + * Note: Use of this filter overrides any time range/time stamp options specified using + * {@link org.apache.hadoop.hbase.client.Get#setTimeRange(long, long)}, * {@link org.apache.hadoop.hbase.client.Scan#setTimeRange(long, long)}, - * {@link org.apache.hadoop.hbase.client.Get#setTimestamp(long)}, - * or {@link org.apache.hadoop.hbase.client.Scan#setTimestamp(long)}. + * {@link org.apache.hadoop.hbase.client.Get#setTimestamp(long)}, or + * {@link org.apache.hadoop.hbase.client.Scan#setTimestamp(long)}. */ @InterfaceAudience.Public public class TimestampsFilter extends FilterBase { @@ -61,15 +58,12 @@ public class TimestampsFilter extends FilterBase { } /** - * Constructor for filter that retains only those - * cells whose timestamp (version) is in the specified - * list of timestamps. - * + * Constructor for filter that retains only those cells whose timestamp (version) is in the + * specified list of timestamps. * @param timestamps list of timestamps that are wanted. - * @param canHint should the filter provide a seek hint? This can skip - * past delete tombstones, so it should only be used when that - * is not an issue ( no deletes, or don't care if data - * becomes visible) + * @param canHint should the filter provide a seek hint? This can skip past delete tombstones, so + * it should only be used when that is not an issue (no deletes, or don't care if data + * becomes visible) */ public TimestampsFilter(List<Long> timestamps, boolean canHint) { for (Long timestamp : timestamps) { @@ -97,7 +91,7 @@ public class TimestampsFilter extends FilterBase { /** * Gets the minimum timestamp requested by filter. - * @return minimum timestamp requested by filter. + * @return minimum timestamp requested by filter. */ public long getMin() { return minTimestamp; @@ -127,13 +121,11 @@ public class TimestampsFilter extends FilterBase { return canHint ? ReturnCode.SEEK_NEXT_USING_HINT : ReturnCode.SKIP; } - /** - * Pick the next cell that the scanner should seek to. Since this can skip any number of cells - * any of which can be a delete this can resurect old data. - * + * Pick the next cell that the scanner should seek to. Since this can skip any number of cells, + * any of which can be a delete, this can resurrect old data. + *

* The method will only be used if canHint was set to true while creating the filter. - * * @throws IOException This will never happen. */ @Override @@ -162,9 +154,9 @@ public class TimestampsFilter extends FilterBase { return PrivateCellUtil.createFirstOnRowColTS(currentCell, nextTimestamp); } - public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) { + public static Filter createFilterFromArguments(ArrayList<byte[]> filterArguments) { ArrayList<Long> timestamps = new ArrayList<>(filterArguments.size()); - for (int i = 0; i<filterArguments.size(); i++) { + for (int i = 0; i < filterArguments.size(); i++) { long timestamp = ParseFilter.convertByteArrayToLong(filterArguments.get(i)); timestamps.add(timestamp); } return new TimestampsFilter(timestamps); } /** * @param pbBytes A pb serialized {@link TimestampsFilter} instance * @return An instance of {@link TimestampsFilter} made from <code>bytes</code> * @see #toByteArray */ - public static TimestampsFilter parseFrom(final byte[] pbBytes) - throws DeserializationException { + public static TimestampsFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.TimestampsFilter proto; try { proto = FilterProtos.TimestampsFilter.parseFrom(pbBytes); @@ -203,15 +192,19 @@ public class TimestampsFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. */ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof TimestampsFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof TimestampsFilter)) { + return false; + } - TimestampsFilter other = (TimestampsFilter)o; + TimestampsFilter other = (TimestampsFilter) o; return this.getTimestamps().equals(other.getTimestamps()); } @@ -235,7 +228,21 @@ public class TimestampsFilter extends FilterBase { } } - return String.format("%s (%d/%d): [%s] canHint: [%b]", this.getClass().getSimpleName(), - count, this.timestamps.size(), tsList.toString(), canHint); + return String.format("%s (%d/%d): [%s] canHint: [%b]", this.getClass().getSimpleName(), count, + this.timestamps.size(), tsList.toString(), canHint); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof TimestampsFilter))) { + return false; + } + TimestampsFilter f = (TimestampsFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getTimestamps().toArray()); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 3faa111d11..e4c8a84c31 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter; import java.io.IOException; import java.util.ArrayList; - +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; @@ -139,4 +139,18 @@ public class ValueFilter extends CompareFilter { return super.areSerializedFieldsEqual(o); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof ValueFilter))) { + return false; + } + ValueFilter f = (ValueFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.getComparator(), this.getCompareOperator()); + } }
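Both TimestampsFilter and ValueFilter above route equals() through the pre-existing areSerializedFieldsEqual(), so two filters that would serialize identically now also behave as duplicates in hash-based collections. A minimal sketch of that contract, with illustrative timestamp values (not taken from the patch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.TimestampsFilter;

public class TimestampsFilterEqualityDemo {
  public static void main(String[] args) {
    // Two filters built from equal timestamp lists serialize the same way...
    Filter a = new TimestampsFilter(Arrays.asList(1L, 2L, 3L));
    Filter b = new TimestampsFilter(Arrays.asList(1L, 2L, 3L));
    // ...so with the equals()/hashCode() overrides above they collapse
    // to a single entry instead of two.
    Set<Filter> unique = new HashSet<>();
    unique.add(a);
    unique.add(b);
    System.out.println(unique.size()); // 1 with this patch applied
  }
}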
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index 6c1a47f215..251c768780 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -1,40 +1,32 @@ /* - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.filter; import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.yetus.audience.InterfaceAudience; /** - * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon - * as the wrapped filters {@link Filter#filterRowKey(byte[], int, int)}, + * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon as any of the + * wrapped filter's {@link Filter#filterRowKey(byte[], int, int)}, * {@link Filter#filterCell(org.apache.hadoop.hbase.Cell)}, * {@link org.apache.hadoop.hbase.filter.Filter#filterRow()} or - * {@link org.apache.hadoop.hbase.filter.Filter#filterAllRemaining()} methods - * returns true. + * {@link org.apache.hadoop.hbase.filter.Filter#filterAllRemaining()} methods returns true.
*/ @InterfaceAudience.Public public class WhileMatchFilter extends FilterBase { @@ -72,7 +64,9 @@ public class WhileMatchFilter extends FilterBase { @Override public boolean filterRowKey(Cell cell) throws IOException { - if (filterAllRemaining()) return true; + if (filterAllRemaining()) { + return true; + } boolean value = filter.filterRowKey(cell); changeFAR(value); return value; @@ -102,7 +96,7 @@ public class WhileMatchFilter extends FilterBase { changeFAR(filterRow); return filterRow; } - + @Override public boolean hasFilterRow() { return true; @@ -113,8 +107,7 @@ public class WhileMatchFilter extends FilterBase { */ @Override public byte[] toByteArray() throws IOException { - FilterProtos.WhileMatchFilter.Builder builder = - FilterProtos.WhileMatchFilter.newBuilder(); + FilterProtos.WhileMatchFilter.Builder builder = FilterProtos.WhileMatchFilter.newBuilder(); builder.setFilter(ProtobufUtil.toFilter(this.filter)); return builder.build().toByteArray(); } @@ -125,8 +118,7 @@ public class WhileMatchFilter extends FilterBase { * @throws org.apache.hadoop.hbase.exceptions.DeserializationException * @see #toByteArray */ - public static WhileMatchFilter parseFrom(final byte [] pbBytes) - throws DeserializationException { + public static WhileMatchFilter parseFrom(final byte[] pbBytes) throws DeserializationException { FilterProtos.WhileMatchFilter proto; try { proto = FilterProtos.WhileMatchFilter.parseFrom(pbBytes); @@ -142,15 +134,19 @@ public class WhileMatchFilter extends FilterBase { /** * @param o the other filter to compare with - * @return true if and only if the fields of the filter that are serialized - * are equal to the corresponding fields in other. Used for testing. + * @return true if and only if the fields of the filter that are serialized are equal to the + * corresponding fields in other. Used for testing. 
*/ @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) return true; - if (!(o instanceof WhileMatchFilter)) return false; + if (o == this) { + return true; + } + if (!(o instanceof WhileMatchFilter)) { + return false; + } - WhileMatchFilter other = (WhileMatchFilter)o; + WhileMatchFilter other = (WhileMatchFilter) o; return getFilter().areSerializedFieldsEqual(other.getFilter()); } @@ -163,4 +159,18 @@ public class WhileMatchFilter extends FilterBase { public String toString() { return this.getClass().getSimpleName() + " " + this.filter.toString(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + WhileMatchFilter f = (WhileMatchFilter) obj; + return this.areSerializedFieldsEqual(f); + } + + @Override + public int hashCode() { + return Objects.hash(this.filter); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java index 24f750af04..c95f7bcc90 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.security.access; import java.io.IOException; import java.util.Map; - +import java.util.Objects; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -173,4 +173,25 @@ class AccessControlFilter extends FilterBase { throw new UnsupportedOperationException( "Serialization not supported. Intended for server-side use only."); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + AccessControlFilter f = (AccessControlFilter) obj; + return this.authManager.equals(f.authManager) + && this.table.equals(f.table) + && this.user.equals(f.user) + && this.strategy.equals(f.strategy) + && this.cfVsMaxVersions.equals(f.cfVsMaxVersions); + } + + @Override + public int hashCode() { + return Objects.hash(this.authManager, this.table, this.strategy, this.user, this.cfVsMaxVersions); + } }
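WhileMatchFilter's equals(), like SkipFilter's, is effectively recursive: areSerializedFieldsEqual() delegates to the wrapped filter, so equality follows the whole wrapper chain. A rough sketch of the intent; it assumes the inner filter type (PrefixFilter here) also receives value-based equals()/hashCode() elsewhere in this change set:

import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class WrapperFilterEqualityDemo {
  public static void main(String[] args) {
    WhileMatchFilter w1 = new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("row-")));
    WhileMatchFilter w2 = new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("row-")));
    // Equality recurses into the wrapped filters' serialized fields.
    System.out.println(w1.equals(w2)); // true
    // hashCode() is Objects.hash(this.filter), so the two wrappers only agree
    // if PrefixFilter.hashCode() is value-based as well (assumed here).
    System.out.println(w1.hashCode() == w2.hashCode());
  }
}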
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 4f00e7daa3..0ebaf09c46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -1038,6 +1038,23 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso deleteCellVisTagsFormat); return matchFound ? ReturnCode.INCLUDE : ReturnCode.SKIP; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + DeleteVersionVisibilityExpressionFilter f = (DeleteVersionVisibilityExpressionFilter) obj; + return this.deleteCellVisTags.equals(f.deleteCellVisTags) + && this.deleteCellVisTagsFormat.equals(f.deleteCellVisTagsFormat); + } + + @Override + public int hashCode() { + return Objects.hash(this.deleteCellVisTags, this.deleteCellVisTagsFormat); + } } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java index 2bde9b59b1..d7eb6e5ac3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; import java.util.Map; - +import java.util.Objects; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -90,4 +90,21 @@ class VisibilityLabelFilter extends FilterBase { this.curFamilyMaxVersions = 0; this.curQualMetVersions = 0; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + VisibilityLabelFilter f = (VisibilityLabelFilter) obj; + return this.expEvaluator.equals(f.expEvaluator) + && this.cfVsMaxVersions.equals(f.cfVsMaxVersions); + } + + @Override + public int hashCode() { + return Objects.hash(this.expEvaluator, this.cfVsMaxVersions); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java index 5bede2ad11..5735a981c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/ColumnCountOnRowFilter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; +import java.util.Objects; import org.apache.hadoop.hbase.Cell; import org.apache.yetus.audience.InterfaceAudience; @@ -55,4 +56,21 @@ public final class ColumnCountOnRowFilter extends FilterBase { public static ColumnCountOnRowFilter parseFrom(byte[] bytes) throws DeserializationException { return new ColumnCountOnRowFilter(Bytes.toInt(bytes)); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + ColumnCountOnRowFilter f = (ColumnCountOnRowFilter) obj; + return this.limit == f.limit; + } + + @Override + public int hashCode() { + return Objects.hash(this.limit); + } }
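The test-only ColumnCountOnRowFilter (under hbase-server/src/test) compares its single int field directly rather than round-tripping through serialization, which keeps the contract easy to see. A short sketch of the resulting behavior:

import org.apache.hadoop.hbase.client.ColumnCountOnRowFilter;

public class ColumnCountEqualityDemo {
  public static void main(String[] args) {
    ColumnCountOnRowFilter three = new ColumnCountOnRowFilter(3);
    ColumnCountOnRowFilter alsoThree = new ColumnCountOnRowFilter(3);
    ColumnCountOnRowFilter five = new ColumnCountOnRowFilter(5);
    System.out.println(three.equals(alsoThree)); // true: same limit
    System.out.println(three.equals(five)); // false: different limit
    // hashCode() is Objects.hash(limit), consistent with equals().
    System.out.println(three.hashCode() == alsoThree.hashCode()); // true
  }
}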
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java index 40628db3e3..e2a70b23b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java @@ -54,11 +54,14 @@ public class FilterAllFilter extends FilterBase { @Override boolean areSerializedFieldsEqual(Filter o) { - if (o == this) + if (o == this) { return true; - if (!(o instanceof FilterAllFilter)) + } + if (!(o instanceof FilterAllFilter)) { return false; + } return true; } + } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index a3e3359c1d..e6d73fb33e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; + import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 5f9515ac4d..3cd1793de3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -1,64 +1,47 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter.ReturnCode; import org.apache.hadoop.hbase.filter.FilterList.Operator; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.junit.Assert; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.hbase.thirdparty.com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import static org.junit.Assert.*; -@Category({FilterTests.class, SmallTests.class}) -public class TestFilterList { +@Category({ FilterTests.class, SmallTests.class }) public class TestFilterList { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = + @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestFilterList.class); static final int MAX_PAGES = 2; - @Test - public void testAddFilter() throws Exception { + @Test public void testAddFilter() throws Exception { Filter filter1 = new FirstKeyOnlyFilter(); Filter filter2 = new FirstKeyOnlyFilter(); @@ -85,8 +68,7 @@ public class TestFilterList { } - @Test - public void testConstruction() { + @Test public void testConstruction() { FirstKeyOnlyFilter f1 = new FirstKeyOnlyFilter(); FirstKeyOnlyFilter f2 = new FirstKeyOnlyFilter(); f1.setReversed(true); @@ -116,12 +98,13 @@ public class TestFilterList { } catch (IllegalArgumentException e) { } } + /** * Test "must pass one" + * * @throws Exception */ - @Test - public void testMPONE() throws Exception { + @Test public void testMPONE() throws Exception { mpOneTest(getFilterMPONE()); } @@ -129,8 +112,7 @@ public class TestFilterList { List<Filter> filters = new ArrayList<>(); filters.add(new PageFilter(MAX_PAGES)); filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy")))); - Filter filterMPONE = - new FilterList(FilterList.Operator.MUST_PASS_ONE, filters); + Filter filterMPONE = new FilterList(FilterList.Operator.MUST_PASS_ONE, filters); return filterMPONE; } @@ -146,16 +128,15 @@ public class TestFilterList { * filterValue() calls. Eg: filter a row if it doesn't contain a specified column.
* * - */ + */ filterMPONE.reset(); assertFalse(filterMPONE.filterAllRemaining()); /* Will pass both */ - byte [] rowkey = Bytes.toBytes("yyyyyyyyy"); + byte[] rowkey = Bytes.toBytes("yyyyyyyyy"); for (int i = 0; i < MAX_PAGES - 1; i++) { assertFalse(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), - Bytes.toBytes(i)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); } @@ -163,16 +144,14 @@ public class TestFilterList { /* Only pass PageFilter */ rowkey = Bytes.toBytes("z"); assertFalse(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), - Bytes.toBytes(0)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), Bytes.toBytes(0)); assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); /* reach MAX_PAGES already, should filter any rows */ rowkey = Bytes.toBytes("yyy"); assertTrue(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), - Bytes.toBytes(0)); + kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0), Bytes.toBytes(0)); assertFalse(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); @@ -184,10 +163,10 @@ public class TestFilterList { /** * Test "must pass all" + * * @throws Exception */ - @Test - public void testMPALL() throws Exception { + @Test public void testMPALL() throws Exception { mpAllTest(getMPALLFilter()); } @@ -195,8 +174,7 @@ public class TestFilterList { List<Filter> filters = new ArrayList<>(); filters.add(new PageFilter(MAX_PAGES)); filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy")))); - Filter filterMPALL = - new FilterList(FilterList.Operator.MUST_PASS_ALL, filters); + Filter filterMPALL = new FilterList(FilterList.Operator.MUST_PASS_ALL, filters); return filterMPALL; } @@ -212,14 +190,13 @@ public class TestFilterList { * filterValue() calls. Eg: filter a row if it doesn't contain a specified column. * * - */ + */ filterMPALL.reset(); assertFalse(filterMPALL.filterAllRemaining()); - byte [] rowkey = Bytes.toBytes("yyyyyyyyy"); + byte[] rowkey = Bytes.toBytes("yyyyyyyyy"); for (int i = 0; i < MAX_PAGES - 1; i++) { assertFalse(filterMPALL.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), - Bytes.toBytes(i)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); assertTrue(Filter.ReturnCode.INCLUDE == filterMPALL.filterCell(kv)); } filterMPALL.reset(); @@ -232,10 +209,10 @@ public class TestFilterList { /** * Test list ordering + * * @throws Exception */ - @Test - public void testOrdering() throws Exception { + @Test public void testOrdering() throws Exception { orderingTest(getOrderingFilter()); } @@ -243,8 +220,7 @@ public class TestFilterList { List<Filter> filters = new ArrayList<>(); filters.add(new PrefixFilter(Bytes.toBytes("yyy"))); filters.add(new PageFilter(MAX_PAGES)); - Filter filterMPONE = - new FilterList(FilterList.Operator.MUST_PASS_ONE, filters); + Filter filterMPONE = new FilterList(FilterList.Operator.MUST_PASS_ONE, filters); return filterMPONE; } @@ -260,17 +236,16 @@ public class TestFilterList { * filterValue() calls. Eg: filter a row if it doesn't contain a specified column.
* * - */ + */ filterMPONE.reset(); assertFalse(filterMPONE.filterAllRemaining()); /* We should be able to fill MAX_PAGES without incrementing page counter */ - byte [] rowkey = Bytes.toBytes("yyyyyyyy"); + byte[] rowkey = Bytes.toBytes("yyyyyyyy"); for (int i = 0; i < MAX_PAGES; i++) { assertFalse(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), - Bytes.toBytes(i)); - assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); + assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); } @@ -278,9 +253,8 @@ public class TestFilterList { rowkey = Bytes.toBytes("xxxxxxx"); for (int i = 0; i < MAX_PAGES; i++) { assertFalse(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), - Bytes.toBytes(i)); - assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); + assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); } @@ -288,9 +262,8 @@ public class TestFilterList { rowkey = Bytes.toBytes("yyy"); for (int i = 0; i < MAX_PAGES; i++) { assertFalse(filterMPONE.filterRowKey(KeyValueUtil.createFirstOnRow(rowkey))); - KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), - Bytes.toBytes(i)); - assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); + KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i), Bytes.toBytes(i)); + assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterCell(kv)); assertFalse(filterMPONE.filterRow()); } } @@ -298,10 +271,10 @@ public class TestFilterList { /** * When we do a "MUST_PASS_ONE" (a logical 'OR') of the above two filters * we expect to get the same result as the 'prefix' only result. + * * @throws Exception */ - @Test - public void testFilterListTwoFiltersMustPassOne() throws Exception { + @Test public void testFilterListTwoFiltersMustPassOne() throws Exception { byte[] r1 = Bytes.toBytes("Row1"); byte[] r11 = Bytes.toBytes("Row11"); byte[] r2 = Bytes.toBytes("Row2"); @@ -331,10 +304,10 @@ public class TestFilterList { /** * When we do a "MUST_PASS_ONE" (a logical 'OR') of the two filters * we expect to get the same result as the inclusive stop result. 
+ * * @throws Exception */ - @Test - public void testFilterListWithInclusiveStopFilterMustPassOne() throws Exception { + @Test public void testFilterListWithInclusiveStopFilterMustPassOne() throws Exception { byte[] r1 = Bytes.toBytes("Row1"); byte[] r11 = Bytes.toBytes("Row11"); byte[] r2 = Bytes.toBytes("Row2"); @@ -356,8 +329,7 @@ public class TestFilterList { super(); } - @Override - public ReturnCode filterCell(final Cell v) { + @Override public ReturnCode filterCell(final Cell v) { return ReturnCode.NEXT_COL; } @@ -369,15 +341,14 @@ public class TestFilterList { /** * Test serialization + * * @throws Exception */ - @Test - public void testSerialization() throws Exception { + @Test public void testSerialization() throws Exception { List<Filter> filters = new ArrayList<>(); filters.add(new PageFilter(MAX_PAGES)); filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy")))); - Filter filterMPALL = - new FilterList(FilterList.Operator.MUST_PASS_ALL, filters); + Filter filterMPALL = new FilterList(FilterList.Operator.MUST_PASS_ALL, filters); // Decompose filterMPALL to bytes. byte[] buffer = filterMPALL.toByteArray(); @@ -393,13 +364,12 @@ public class TestFilterList { /** * Test filterCell logic. + * * @throws Exception */ - @Test - public void testFilterCell() throws Exception { + @Test public void testFilterCell() throws Exception { Filter includeFilter = new FilterBase() { - @Override - public Filter.ReturnCode filterCell(final Cell v) { + @Override public Filter.ReturnCode filterCell(final Cell v) { return Filter.ReturnCode.INCLUDE; } }; @@ -407,10 +377,9 @@ public class TestFilterList { Filter alternateFilter = new FilterBase() { boolean returnInclude = true; - @Override - public Filter.ReturnCode filterCell(final Cell v) { - Filter.ReturnCode returnCode = returnInclude ? Filter.ReturnCode.INCLUDE : - Filter.ReturnCode.SKIP; + @Override public Filter.ReturnCode filterCell(final Cell v) { + Filter.ReturnCode returnCode = + returnInclude ? Filter.ReturnCode.INCLUDE : Filter.ReturnCode.SKIP; returnInclude = !returnInclude; return returnCode; } @@ -419,10 +388,9 @@ public class TestFilterList { Filter alternateIncludeFilter = new FilterBase() { boolean returnIncludeOnly = false; - @Override - public Filter.ReturnCode filterCell(final Cell v) { - Filter.ReturnCode returnCode = returnIncludeOnly ? Filter.ReturnCode.INCLUDE : - Filter.ReturnCode.INCLUDE_AND_NEXT_COL; + @Override public Filter.ReturnCode filterCell(final Cell v) { + Filter.ReturnCode returnCode = + returnIncludeOnly ? Filter.ReturnCode.INCLUDE : Filter.ReturnCode.INCLUDE_AND_NEXT_COL; returnIncludeOnly = !returnIncludeOnly; return returnCode; } @@ -448,53 +416,47 @@ public class TestFilterList { /** * Test pass-thru of hints.
*/ - @Test - public void testHintPassThru() throws Exception { + @Test public void testHintPassThru() throws Exception { final KeyValue minKeyValue = new KeyValue(Bytes.toBytes(0L), null, null); - final KeyValue maxKeyValue = new KeyValue(Bytes.toBytes(Long.MAX_VALUE), - null, null); + final KeyValue maxKeyValue = new KeyValue(Bytes.toBytes(Long.MAX_VALUE), null, null); Filter filterNoHint = new FilterBase() { - @Override - public byte [] toByteArray() { + @Override public byte[] toByteArray() { return null; } - @Override - public ReturnCode filterCell(final Cell ignored) throws IOException { + @Override public ReturnCode filterCell(final Cell ignored) throws IOException { return ReturnCode.INCLUDE; } }; Filter filterMinHint = new FilterBase() { - @Override - public ReturnCode filterCell(final Cell ignored) { + @Override public ReturnCode filterCell(final Cell ignored) { return ReturnCode.SEEK_NEXT_USING_HINT; } - @Override - public Cell getNextCellHint(Cell currentKV) { + @Override public Cell getNextCellHint(Cell currentKV) { return minKeyValue; } - @Override - public byte [] toByteArray() {return null;} + @Override public byte[] toByteArray() { + return null; + } }; Filter filterMaxHint = new FilterBase() { - @Override - public ReturnCode filterCell(final Cell ignored) { + @Override public ReturnCode filterCell(final Cell ignored) { return ReturnCode.SEEK_NEXT_USING_HINT; } - @Override - public Cell getNextCellHint(Cell cell) { + @Override public Cell getNextCellHint(Cell cell) { return new KeyValue(Bytes.toBytes(Long.MAX_VALUE), null, null); } - @Override - public byte [] toByteArray() {return null;} + @Override public byte[] toByteArray() { + return null; + } }; CellComparator comparator = CellComparator.getInstance(); @@ -502,16 +464,15 @@ public class TestFilterList { // Should take the min if given two hints FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, - Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } )); + Arrays.asList(new Filter[] { filterMinHint, filterMaxHint })); assertEquals(0, comparator.compare(filterList.getNextCellHint(null), minKeyValue)); // Should have no hint if any filter has no hint filterList = new FilterList(Operator.MUST_PASS_ONE, - Arrays.asList( - new Filter [] { filterMinHint, filterMaxHint, filterNoHint } )); + Arrays.asList(new Filter[] { filterMinHint, filterMaxHint, filterNoHint })); assertNull(filterList.getNextCellHint(null)); filterList = new FilterList(Operator.MUST_PASS_ONE, - Arrays.asList(new Filter [] { filterNoHint, filterMaxHint } )); + Arrays.asList(new Filter[] { filterNoHint, filterMaxHint })); assertNull(filterList.getNextCellHint(null)); // Should give max hint if its the only one @@ -523,12 +484,12 @@ public class TestFilterList { // Should take the first hint filterList = new FilterList(Operator.MUST_PASS_ALL, - Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } )); + Arrays.asList(new Filter[] { filterMinHint, filterMaxHint })); filterList.filterCell(null); assertEquals(0, comparator.compare(filterList.getNextCellHint(null), minKeyValue)); filterList = new FilterList(Operator.MUST_PASS_ALL, - Arrays.asList(new Filter [] { filterMaxHint, filterMinHint } )); + Arrays.asList(new Filter[] { filterMaxHint, filterMinHint })); filterList.filterCell(null); assertEquals(0, comparator.compare(filterList.getNextCellHint(null), maxKeyValue)); @@ -549,30 +510,33 @@ public class TestFilterList { /** * Tests the behavior of transform() in a hierarchical filter. - * + *

* transform() only applies after a filterCell() whose return-code includes the KeyValue. * Lazy evaluation of AND */ - @Test - public void testTransformMPO() throws Exception { + @Test public void testTransformMPO() throws Exception { // Apply the following filter: // (family=fam AND qualifier=qual1 AND KeyOnlyFilter) // OR (family=fam AND qualifier=qual2) final FilterList flist = new FilterList(Operator.MUST_PASS_ONE, Lists.newArrayList( new FilterList(Operator.MUST_PASS_ALL, Lists.newArrayList( new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("fam"))), - new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("qual1"))), - new KeyOnlyFilter())), + new QualifierFilter(CompareOperator.EQUAL, + new BinaryComparator(Bytes.toBytes("qual1"))), new KeyOnlyFilter())), new FilterList(Operator.MUST_PASS_ALL, Lists.newArrayList( new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("fam"))), - new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("qual2"))))))); + new QualifierFilter(CompareOperator.EQUAL, + new BinaryComparator(Bytes.toBytes("qual2"))))))); - final KeyValue kvQual1 = new KeyValue( - Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual1"), Bytes.toBytes("value")); - final KeyValue kvQual2 = new KeyValue( - Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual2"), Bytes.toBytes("value")); - final KeyValue kvQual3 = new KeyValue( - Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual3"), Bytes.toBytes("value")); + final KeyValue kvQual1 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual1"), + Bytes.toBytes("value")); + final KeyValue kvQual2 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual2"), + Bytes.toBytes("value")); + final KeyValue kvQual3 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual3"), + Bytes.toBytes("value")); // Value for fam:qual1 should be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterCell(kvQual1)); @@ -582,24 +546,27 @@ public class TestFilterList { // Value for fam:qual2 should not be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterCell(kvQual2)); final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2)); - assertEquals("value", Bytes.toString(transformedQual2.getValueArray(), - transformedQual2.getValueOffset(), transformedQual2.getValueLength())); + assertEquals("value", Bytes + .toString(transformedQual2.getValueArray(), transformedQual2.getValueOffset(), + transformedQual2.getValueLength())); // Other keys should be skipped: assertEquals(Filter.ReturnCode.SKIP, flist.filterCell(kvQual3)); } - @Test - public void testWithMultiVersionsInSameRow() throws Exception { + @Test public void testWithMultiVersionsInSameRow() throws Exception { FilterList filterList01 = new FilterList(Operator.MUST_PASS_ONE, new ColumnPaginationFilter(1, 0)); - KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), - 1, Bytes.toBytes("value")); - KeyValue kv2 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), - 2, Bytes.toBytes("value")); - KeyValue kv3 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), - 3, Bytes.toBytes("value")); + KeyValue kv1 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), 1, + Bytes.toBytes("value")); + KeyValue kv2 = + new 
KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), 2, + Bytes.toBytes("value")); + KeyValue kv3 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("qual"), 3, + Bytes.toBytes("value")); assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList01.filterCell(kv1)); assertEquals(ReturnCode.SKIP, filterList01.filterCell(kv2)); @@ -613,8 +580,7 @@ public class TestFilterList { assertEquals(ReturnCode.SKIP, filterList11.filterCell(kv3)); } - @Test - public void testMPONEWithSeekNextUsingHint() throws Exception { + @Test public void testMPONEWithSeekNextUsingHint() throws Exception { byte[] col = Bytes.toBytes("c"); FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, new ColumnPaginationFilter(1, col)); @@ -642,15 +608,13 @@ public class TestFilterList { this.targetRetCode = targetRetCode; } - @Override - public ReturnCode filterCell(final Cell v) throws IOException { + @Override public ReturnCode filterCell(final Cell v) throws IOException { this.didCellPassToTheFilter = true; return targetRetCode; } } - @Test - public void testShouldPassCurrentCellToFilter() throws IOException { + @Test public void testShouldPassCurrentCellToFilter() throws IOException { KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 1, Bytes.toBytes("value")); KeyValue kv2 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 2, @@ -725,8 +689,7 @@ public class TestFilterList { assertTrue(mockFilter.didCellPassToTheFilter); } - @Test - public void testTheMaximalRule() throws IOException { + @Test public void testTheMaximalRule() throws IOException { KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 1, Bytes.toBytes("value")); MockFilter filter1 = new MockFilter(ReturnCode.INCLUDE); @@ -763,8 +726,7 @@ public class TestFilterList { assertEquals(ReturnCode.NEXT_ROW, filterList.filterCell(kv1)); } - @Test - public void testTheMinimalRule() throws IOException { + @Test public void testTheMinimalRule() throws IOException { KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 1, Bytes.toBytes("value")); MockFilter filter1 = new MockFilter(ReturnCode.INCLUDE); @@ -810,19 +772,16 @@ public class TestFilterList { this.returnCell = returnCell; } - @Override - public ReturnCode filterCell(final Cell v) throws IOException { + @Override public ReturnCode filterCell(final Cell v) throws IOException { return ReturnCode.SEEK_NEXT_USING_HINT; } - @Override - public Cell getNextCellHint(Cell currentCell) throws IOException { + @Override public Cell getNextCellHint(Cell currentCell) throws IOException { return this.returnCell; } } - @Test - public void testReversedFilterListWithMockSeekHintFilter() throws IOException { + @Test public void testReversedFilterListWithMockSeekHintFilter() throws IOException { KeyValue kv1 = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 1, Bytes.toBytes("value")); KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("fam"), Bytes.toBytes("a"), 1, @@ -855,8 +814,7 @@ public class TestFilterList { Assert.assertEquals(kv1, filterList.getNextCellHint(kv1)); } - @Test - public void testReversedFilterListWithOR() throws IOException { + @Test public void testReversedFilterListWithOR() throws IOException { byte[] r22 = Bytes.toBytes("Row22"); byte[] r2 = Bytes.toBytes("Row2"); byte[] r1 = Bytes.toBytes("Row1"); @@ -891,13 +849,14 @@ public class TestFilterList { 
assertEquals(ReturnCode.NEXT_COL, filterList.filterCell(new KeyValue(r1, r1, r1))); } - @Test - public void testKeyOnlyFilterTransformCell() throws IOException { + @Test public void testKeyOnlyFilterTransformCell() throws IOException { Cell c; - KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), - 1, Bytes.toBytes("value1")); - KeyValue kv2 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), - 2, Bytes.toBytes("value2")); + KeyValue kv1 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), 1, + Bytes.toBytes("value1")); + KeyValue kv2 = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), 2, + Bytes.toBytes("value2")); Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes("cf"), Bytes.toBytes("column1"), CompareOperator.EQUAL, Bytes.toBytes("value1")); @@ -926,10 +885,10 @@ public class TestFilterList { assertEquals(0, c.getValueLength()); } - @Test - public void testEmptyFilterListTransformCell() throws IOException { - KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), - 1, Bytes.toBytes("value")); + @Test public void testEmptyFilterListTransformCell() throws IOException { + KeyValue kv = + new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), 1, + Bytes.toBytes("value")); FilterList filterList = new FilterList(Operator.MUST_PASS_ALL); assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv)); assertEquals(kv, filterList.transformCell(kv)); @@ -942,8 +901,7 @@ public class TestFilterList { private static class MockNextRowFilter extends FilterBase { private int hitCount = 0; - @Override - public ReturnCode filterCell(final Cell v) throws IOException { + @Override public ReturnCode filterCell(final Cell v) throws IOException { hitCount++; return ReturnCode.NEXT_ROW; } @@ -953,8 +911,7 @@ public class TestFilterList { } } - @Test - public void testRowCountFilter() throws IOException { + @Test public void testRowCountFilter() throws IOException { KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam1"), Bytes.toBytes("a"), 1, Bytes.toBytes("value")); KeyValue kv2 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam2"), Bytes.toBytes("a"), 2, @@ -974,13 +931,11 @@ public class TestFilterList { this.targetRetCode = targetRetCode; } - @Override - public ReturnCode filterCell(final Cell v) throws IOException { + @Override public ReturnCode filterCell(final Cell v) throws IOException { return targetRetCode; } - @Override - public Cell transformCell(Cell c) throws IOException { + @Override public Cell transformCell(Cell c) throws IOException { transformed = true; return super.transformCell(c); } @@ -988,10 +943,24 @@ public class TestFilterList { public boolean getTransformed() { return this.transformed; } + + @Override public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + if (this == obj) { + return true; + } + TransformFilter f = (TransformFilter) obj; + return this.targetRetCode.equals(f.targetRetCode); + } + + @Override public int hashCode() { + return Objects.hash(this.targetRetCode); + } } - @Test - public void testTransformCell() throws IOException { + @Test public void testTransformCell() throws IOException { KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"), Bytes.toBytes("column1"), 1, Bytes.toBytes("value")); diff --git 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java index 815643d441..bf0f71b342 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSwitchToStreamRead.java @@ -6,9 +6,9 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * + * http://www.apache.org/licenses/LICENSE-2.0 + *
* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,9 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.concurrent.ThreadLocalRandom; + import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -53,7 +55,7 @@ public class TestSwitchToStreamRead { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSwitchToStreamRead.class); + HBaseClassTestRule.forClass(TestSwitchToStreamRead.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @@ -69,26 +71,27 @@ public class TestSwitchToStreamRead { @BeforeClass public static void setUp() throws IOException { - UTIL.getConfiguration().setLong(StoreScanner.STORESCANNER_PREAD_MAX_BYTES, 2048); + UTIL.getConfiguration() + .setLong(StoreScanner.STORESCANNER_PREAD_MAX_BYTES, 2048); StringBuilder sb = new StringBuilder(256); for (int i = 0; i < 255; i++) { sb.append((char) ThreadLocalRandom.current().nextInt('A', 'z' + 1)); } VALUE_PREFIX = sb.append("-").toString(); REGION = UTIL.createLocalHRegion( - TableDescriptorBuilder.newBuilder(TABLE_NAME) - .setColumnFamily( - ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBlocksize(1024).build()) - .build(), - null, null); + TableDescriptorBuilder.newBuilder(TABLE_NAME) + .setColumnFamily( + ColumnFamilyDescriptorBuilder.newBuilder(FAMILY) + .setBlocksize(1024).build()).build(), + null, null); for (int i = 0; i < 900; i++) { - REGION - .put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); + REGION.put(new Put(Bytes.toBytes(i)) + .addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); } REGION.flush(true); for (int i = 900; i < 1000; i++) { - REGION - .put(new Put(Bytes.toBytes(i)).addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); + REGION.put(new Put(Bytes.toBytes(i)) + .addColumn(FAMILY, QUAL, Bytes.toBytes(VALUE_PREFIX + i))); } } @@ -102,7 +105,8 @@ public class TestSwitchToStreamRead { public void test() throws IOException { try (RegionScannerImpl scanner = REGION.getScanner(new Scan())) { StoreScanner storeScanner = - (StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); + (StoreScanner) (scanner).getStoreHeapForTesting() + .getCurrentForTesting(); for (KeyValueScanner kvs : storeScanner.getAllScannersForTesting()) { if (kvs instanceof StoreFileScanner) { StoreFileScanner sfScanner = (StoreFileScanner) kvs; @@ -114,7 +118,8 @@ public class TestSwitchToStreamRead { for (int i = 0; i < 500; i++) { assertTrue(scanner.next(cells)); Result result = Result.create(cells); - assertEquals(VALUE_PREFIX + i, Bytes.toString(result.getValue(FAMILY, QUAL))); + assertEquals(VALUE_PREFIX + i, + Bytes.toString(result.getValue(FAMILY, QUAL))); cells.clear(); scanner.shipped(); } @@ -128,7 +133,8 @@ public class TestSwitchToStreamRead { for (int i = 500; i < 1000; i++) { assertEquals(i != 999, scanner.next(cells)); Result result = Result.create(cells); - assertEquals(VALUE_PREFIX + i, Bytes.toString(result.getValue(FAMILY, QUAL))); + assertEquals(VALUE_PREFIX + i, + Bytes.toString(result.getValue(FAMILY, QUAL))); cells.clear(); scanner.shipped(); } @@ -145,12 +151,27 @@ public class TestSwitchToStreamRead { public 
boolean filterRowKey(Cell cell) throws IOException { return Bytes.toInt(cell.getRowArray(), cell.getRowOffset()) != 999; } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj.getClass() == this.getClass()))) { + return false; + } + return this == obj; + } + + @Override + public int hashCode() { + return Objects.hash(MatchLastRowKeyFilter.class.getName()); + } } private void testFilter(Filter filter) throws IOException { - try (RegionScannerImpl scanner = REGION.getScanner(new Scan().setFilter(filter))) { + try (RegionScannerImpl scanner = REGION + .getScanner(new Scan().setFilter(filter))) { StoreScanner storeScanner = - (StoreScanner) (scanner).getStoreHeapForTesting().getCurrentForTesting(); + (StoreScanner) (scanner).getStoreHeapForTesting() + .getCurrentForTesting(); for (KeyValueScanner kvs : storeScanner.getAllScannersForTesting()) { if (kvs instanceof StoreFileScanner) { StoreFileScanner sfScanner = (StoreFileScanner) kvs; @@ -160,8 +181,8 @@ public class TestSwitchToStreamRead { } List cells = new ArrayList<>(); // should return before finishing the scan as we want to switch from pread to stream - assertTrue(scanner.next(cells, - ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); + assertTrue(scanner.next(cells, ScannerContext.newBuilder() + .setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); assertTrue(cells.isEmpty()); scanner.shipped(); @@ -172,10 +193,11 @@ public class TestSwitchToStreamRead { assertFalse(sfScanner.getReader().shared); } } - assertFalse(scanner.next(cells, - ScannerContext.newBuilder().setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); + assertFalse(scanner.next(cells, ScannerContext.newBuilder() + .setTimeLimit(LimitScope.BETWEEN_CELLS, -1).build())); Result result = Result.create(cells); - assertEquals(VALUE_PREFIX + 999, Bytes.toString(result.getValue(FAMILY, QUAL))); + assertEquals(VALUE_PREFIX + 999, + Bytes.toString(result.getValue(FAMILY, QUAL))); cells.clear(); scanner.shipped(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java index 861b83e2fe..c571020ad3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; + import java.io.IOException; import java.util.ArrayList; import java.util.List; diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java index 694fb6ab8c..557934daa0 100644 --- a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java +++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java @@ -1,33 +1,20 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable + * law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License + * for the specific language governing permissions and limitations under the License. */ package org.apache.hadoop.hbase.spark; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hadoop.hbase.filter.Filter.ReturnCode; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.spark.datasources.BytesEncoder; import org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder; @@ -38,25 +25,25 @@ import org.apache.spark.sql.datasources.hbase.Field; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import scala.collection.mutable.MutableList; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + /** - * This filter will push down all qualifier logic given to us - * by SparkSQL so that we have make the filters at the region server level - * and avoid sending the data back to the client to be filtered. + * This filter will push down all qualifier logic given to us by SparkSQL so that we can make the + * filters at the region server level and avoid sending the data back to the client to be filtered.
*/ @InterfaceAudience.Private -public class SparkSQLPushDownFilter extends FilterBase{ +public class SparkSQLPushDownFilter extends FilterBase { protected static final Logger log = LoggerFactory.getLogger(SparkSQLPushDownFilter.class); - //The following values are populated with protobuffer + // The following values are populated with protobuffer DynamicLogicExpression dynamicLogicExpression; byte[][] valueFromQueryArray; - HashMap> - currentCellToColumnIndexMap; + HashMap> currentCellToColumnIndexMap; - //The following values are transient + // The following values are transient HashMap columnToCurrentRowValueMap = null; static final byte[] rowKeyFamily = new byte[0]; @@ -65,10 +52,9 @@ public class SparkSQLPushDownFilter extends FilterBase{ String encoderClassName; public SparkSQLPushDownFilter(DynamicLogicExpression dynamicLogicExpression, - byte[][] valueFromQueryArray, - HashMap> - currentCellToColumnIndexMap, String encoderClassName) { + byte[][] valueFromQueryArray, + HashMap> currentCellToColumnIndexMap, + String encoderClassName) { this.dynamicLogicExpression = dynamicLogicExpression; this.valueFromQueryArray = valueFromQueryArray; this.currentCellToColumnIndexMap = currentCellToColumnIndexMap; @@ -76,15 +62,13 @@ public class SparkSQLPushDownFilter extends FilterBase{ } public SparkSQLPushDownFilter(DynamicLogicExpression dynamicLogicExpression, - byte[][] valueFromQueryArray, - MutableList fields, String encoderClassName) { + byte[][] valueFromQueryArray, MutableList fields, String encoderClassName) { this.dynamicLogicExpression = dynamicLogicExpression; this.valueFromQueryArray = valueFromQueryArray; this.encoderClassName = encoderClassName; - //generate family qualifier to index mapping - this.currentCellToColumnIndexMap = - new HashMap<>(); + // generate family qualifier to index mapping + this.currentCellToColumnIndexMap = new HashMap<>(); for (int i = 0; i < fields.size(); i++) { Field field = fields.apply(i); @@ -94,7 +78,7 @@ public class SparkSQLPushDownFilter extends FilterBase{ new ByteArrayComparable(cfBytes, 0, cfBytes.length); HashMap qualifierIndexMap = - currentCellToColumnIndexMap.get(familyByteComparable); + currentCellToColumnIndexMap.get(familyByteComparable); if (qualifierIndexMap == null) { qualifierIndexMap = new HashMap<>(); @@ -111,64 +95,51 @@ public class SparkSQLPushDownFilter extends FilterBase{ @Override public ReturnCode filterCell(final Cell c) throws IOException { - //If the map RowValueMap is empty then we need to populate + // If the map RowValueMap is empty then we need to populate // the row key if (columnToCurrentRowValueMap == null) { columnToCurrentRowValueMap = new HashMap<>(); - HashMap qualifierColumnMap = - currentCellToColumnIndexMap.get( - new ByteArrayComparable(rowKeyFamily, 0, rowKeyFamily.length)); + HashMap qualifierColumnMap = currentCellToColumnIndexMap + .get(new ByteArrayComparable(rowKeyFamily, 0, rowKeyFamily.length)); if (qualifierColumnMap != null) { - String rowKeyColumnName = - qualifierColumnMap.get( - new ByteArrayComparable(rowKeyQualifier, 0, - rowKeyQualifier.length)); - //Make sure that the rowKey is part of the where clause + String rowKeyColumnName = qualifierColumnMap + .get(new ByteArrayComparable(rowKeyQualifier, 0, rowKeyQualifier.length)); + // Make sure that the rowKey is part of the where clause if (rowKeyColumnName != null) { columnToCurrentRowValueMap.put(rowKeyColumnName, - new ByteArrayComparable(c.getRowArray(), - c.getRowOffset(), c.getRowLength())); + new ByteArrayComparable(c.getRowArray(), 
c.getRowOffset(), c.getRowLength())); } } } - //Always populate the column value into the RowValueMap + // Always populate the column value into the RowValueMap ByteArrayComparable currentFamilyByteComparable = - new ByteArrayComparable(c.getFamilyArray(), - c.getFamilyOffset(), - c.getFamilyLength()); + new ByteArrayComparable(c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength()); HashMap qualifierColumnMap = - currentCellToColumnIndexMap.get( - currentFamilyByteComparable); + currentCellToColumnIndexMap.get(currentFamilyByteComparable); if (qualifierColumnMap != null) { - String columnName = - qualifierColumnMap.get( - new ByteArrayComparable(c.getQualifierArray(), - c.getQualifierOffset(), - c.getQualifierLength())); + String columnName = qualifierColumnMap.get(new ByteArrayComparable(c.getQualifierArray(), + c.getQualifierOffset(), c.getQualifierLength())); if (columnName != null) { columnToCurrentRowValueMap.put(columnName, - new ByteArrayComparable(c.getValueArray(), - c.getValueOffset(), c.getValueLength())); + new ByteArrayComparable(c.getValueArray(), c.getValueOffset(), c.getValueLength())); } } return ReturnCode.INCLUDE; } - @Override public boolean filterRow() throws IOException { try { boolean result = - dynamicLogicExpression.execute(columnToCurrentRowValueMap, - valueFromQueryArray); + dynamicLogicExpression.execute(columnToCurrentRowValueMap, valueFromQueryArray); columnToCurrentRowValueMap = null; return !result; } catch (Throwable e) { @@ -177,7 +148,6 @@ public class SparkSQLPushDownFilter extends FilterBase{ return false; } - /** * @param pbBytes A pb serialized instance * @return An instance of SparkSQLPushDownFilter @@ -185,7 +155,7 @@ public class SparkSQLPushDownFilter extends FilterBase{ */ @SuppressWarnings("unused") public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) - throws DeserializationException { + throws DeserializationException { SparkFilterProtos.SQLPredicatePushDownFilter proto; try { @@ -197,48 +167,45 @@ public class SparkSQLPushDownFilter extends FilterBase{ String encoder = proto.getEncoderClassName(); BytesEncoder enc = JavaBytesEncoder.create(encoder); - //Load DynamicLogicExpression + // Load DynamicLogicExpression DynamicLogicExpression dynamicLogicExpression = - DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc); + DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc); - //Load valuesFromQuery + // Load valuesFromQuery final List valueFromQueryArrayList = proto.getValueFromQueryArrayList(); byte[][] valueFromQueryArray = new byte[valueFromQueryArrayList.size()][]; for (int i = 0; i < valueFromQueryArrayList.size(); i++) { valueFromQueryArray[i] = valueFromQueryArrayList.get(i).toByteArray(); } - //Load mapping from HBase family/qualifier to Spark SQL columnName - HashMap> - currentCellToColumnIndexMap = new HashMap<>(); + // Load mapping from HBase family/qualifier to Spark SQL columnName + HashMap> currentCellToColumnIndexMap = + new HashMap<>(); - for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping - sqlPredicatePushDownCellToColumnMapping : - proto.getCellToColumnMappingList()) { + for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping sqlPredicatePushDownCellToColumnMapping : proto + .getCellToColumnMappingList()) { - byte[] familyArray = - sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray(); + byte[] familyArray = sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray(); ByteArrayComparable familyByteComparable = - 
new ByteArrayComparable(familyArray, 0, familyArray.length); + new ByteArrayComparable(familyArray, 0, familyArray.length); HashMap qualifierMap = - currentCellToColumnIndexMap.get(familyByteComparable); + currentCellToColumnIndexMap.get(familyByteComparable); if (qualifierMap == null) { qualifierMap = new HashMap<>(); currentCellToColumnIndexMap.put(familyByteComparable, qualifierMap); } - byte[] qualifierArray = - sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray(); + byte[] qualifierArray = sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray(); ByteArrayComparable qualifierByteComparable = - new ByteArrayComparable(qualifierArray, 0 ,qualifierArray.length); + new ByteArrayComparable(qualifierArray, 0, qualifierArray.length); qualifierMap.put(qualifierByteComparable, - sqlPredicatePushDownCellToColumnMapping.getColumnName()); + sqlPredicatePushDownCellToColumnMapping.getColumnName()); } - return new SparkSQLPushDownFilter(dynamicLogicExpression, - valueFromQueryArray, currentCellToColumnIndexMap, encoder); + return new SparkSQLPushDownFilter(dynamicLogicExpression, valueFromQueryArray, + currentCellToColumnIndexMap, encoder); } /** @@ -247,31 +214,59 @@ public class SparkSQLPushDownFilter extends FilterBase{ public byte[] toByteArray() { SparkFilterProtos.SQLPredicatePushDownFilter.Builder builder = - SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder(); + SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder(); SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder columnMappingBuilder = - SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder(); + SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder(); builder.setDynamicLogicExpression(dynamicLogicExpression.toExpressionString()); - for (byte[] valueFromQuery: valueFromQueryArray) { + for (byte[] valueFromQuery : valueFromQueryArray) { builder.addValueFromQueryArray(ByteStringer.wrap(valueFromQuery)); } - for (Map.Entry> - familyEntry : currentCellToColumnIndexMap.entrySet()) { - for (Map.Entry qualifierEntry : - familyEntry.getValue().entrySet()) { - columnMappingBuilder.setColumnFamily( - ByteStringer.wrap(familyEntry.getKey().bytes())); - columnMappingBuilder.setQualifier( - ByteStringer.wrap(qualifierEntry.getKey().bytes())); + for (Map.Entry> familyEntry : currentCellToColumnIndexMap + .entrySet()) { + for (Map.Entry qualifierEntry : familyEntry.getValue() + .entrySet()) { + columnMappingBuilder.setColumnFamily(ByteStringer.wrap(familyEntry.getKey().bytes())); + columnMappingBuilder.setQualifier(ByteStringer.wrap(qualifierEntry.getKey().bytes())); columnMappingBuilder.setColumnName(qualifierEntry.getValue()); builder.addCellToColumnMapping(columnMappingBuilder.build()); } } builder.setEncoderClassName(encoderClassName); - return builder.build().toByteArray(); } + + @Override + public boolean equals(Object obj) { + if (obj == null || (!(obj instanceof SparkSQLPushDownFilter))) { + return false; + } + if (this == obj) { + return true; + } + SparkSQLPushDownFilter f = (SparkSQLPushDownFilter) obj; + if (this.valueFromQueryArray.length != f.valueFromQueryArray.length) { + return false; + } + int i = 0; + for (byte[] val : this.valueFromQueryArray) { + if (!Bytes.equals(val, f.valueFromQueryArray[i])) { + return false; + } + i++; + } + return this.dynamicLogicExpression.toExpressionString() + .equals(f.dynamicLogicExpression.toExpressionString()) + && this.currentCellToColumnIndexMap.equals(f.currentCellToColumnIndexMap) + && 
this.encoderClassName.equals(f.encoderClassName); + } + + @Override + public int hashCode() { + return Objects.hash(this.dynamicLogicExpression.toExpressionString(), + Arrays.hashCode(this.valueFromQueryArray), this.currentCellToColumnIndexMap, + this.encoderClassName); + } }
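Because equals() compares the dynamic logic expression by its expression string (and the remaining fields structurally), and hashCode() is derived from the same fields, a protobuf round trip through toByteArray()/parseFrom() can now be asserted directly. A short sketch of the check this enables, assuming an already-constructed filter instance (the helper name is illustrative, not part of this patch):

  // Hypothetical helper: serialize, deserialize, and verify the equals/hashCode contract.
  static void assertPbRoundTrip(SparkSQLPushDownFilter filter) throws DeserializationException {
    SparkSQLPushDownFilter copy = SparkSQLPushDownFilter.parseFrom(filter.toByteArray());
    assertEquals(filter, copy);                       // field-wise equality via the new equals()
    assertEquals(filter.hashCode(), copy.hashCode()); // equal filters must hash equal
  }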