From aed763a8ad122c84728b2efcefc10c6d8b5a51cf Mon Sep 17 00:00:00 2001
From: mbautin
Date: Tue, 6 Mar 2012 14:21:18 -0800
Subject: [PATCH] Add filter info to slow query logging

---
 .../java/org/apache/hadoop/hbase/client/Get.java   |    3 +
 .../java/org/apache/hadoop/hbase/client/Scan.java  |    9 +-
 .../hadoop/hbase/filter/ColumnCountGetFilter.java  |    7 +-
 .../hbase/filter/ColumnPaginationFilter.java       |    8 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.java    |    5 +
 .../hadoop/hbase/filter/ColumnRangeFilter.java     |    8 +
 .../apache/hadoop/hbase/filter/CompareFilter.java  |    9 +
 .../hadoop/hbase/filter/DependentColumnFilter.java |   10 +
 .../org/apache/hadoop/hbase/filter/FilterBase.java |    7 +
 .../org/apache/hadoop/hbase/filter/FilterList.java |   19 ++-
 .../hadoop/hbase/filter/InclusiveStopFilter.java   |    7 +-
 .../hbase/filter/MultipleColumnPrefixFilter.java   |   25 ++
 .../org/apache/hadoop/hbase/filter/PageFilter.java |    7 +-
 .../apache/hadoop/hbase/filter/PrefixFilter.java   |    7 +-
 .../hbase/filter/SingleColumnValueFilter.java      |    8 +
 .../org/apache/hadoop/hbase/filter/SkipFilter.java |    5 +
 .../hadoop/hbase/filter/TimestampsFilter.java      |   25 ++
 .../hadoop/hbase/filter/WhileMatchFilter.java      |    5 +
 .../apache/hadoop/hbase/client/TestOperation.java  |  240 ++++++++++++++++++++
 19 files changed, 405 insertions(+), 9 deletions(-)

diff --git src/main/java/org/apache/hadoop/hbase/client/Get.java src/main/java/org/apache/hadoop/hbase/client/Get.java
index f4d3e93..2e8b795 100644
--- src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -372,6 +372,9 @@ public class Get extends OperationWithAttributes
       }
     }
     map.put("totalColumns", colCount);
+    if (this.filter != null) {
+      map.put("filter", this.filter.toString());
+    }
     return map;
   }
 
diff --git src/main/java/org/apache/hadoop/hbase/client/Scan.java src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 66f84bf..c4004de 100644
--- src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -308,11 +308,11 @@ public class Scan extends OperationWithAttributes implements Writable {
    * @param batch the maximum number of values
    */
   public void setBatch(int batch) {
-    if(this.hasFilter() && this.filter.hasFilterRow()) {
-      throw new IncompatibleFilterException(
+    if (this.hasFilter() && this.filter.hasFilterRow()) {
+      throw new IncompatibleFilterException(
         "Cannot set batch on a scan using a filter" +
         " that returns true for filter.hasFilterRow");
-    }
+    }
     this.batch = batch;
   }
 
@@ -533,6 +533,9 @@ public class Scan extends OperationWithAttributes implements Writable {
       }
     }
     map.put("totalColumns", colCount);
+    if (this.filter != null) {
+      map.put("filter", this.filter.toString());
+    }
     return map;
   }
 
diff --git src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 52bc35a..dd8a11f 100644
--- src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -92,4 +92,9 @@ public class ColumnCountGetFilter extends FilterBase {
   public void write(DataOutput out) throws IOException {
     out.writeInt(this.limit);
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + this.limit;
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 9143c13..d702515 100644
--- src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -110,4 +110,10 @@ public class ColumnPaginationFilter extends FilterBase
     out.writeInt(this.limit);
     out.writeInt(this.offset);
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return String.format("%s (%d, %d)", this.getClass().getSimpleName(),
+        this.limit, this.offset);
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index f0eba02..4458b27 100644
--- src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -105,4 +105,9 @@ public class ColumnPrefixFilter extends FilterBase {
         kv.getBuffer(), kv.getRowOffset(), kv.getRowLength(), kv.getBuffer(),
         kv.getFamilyOffset(), kv.getFamilyLength(), prefix, 0, prefix.length);
   }
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix);
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 941cc7d..3ae9d98 100644
--- src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -206,4 +206,12 @@ public class ColumnRangeFilter extends FilterBase {
         .getFamilyLength(), this.minColumn, 0, this.minColumn == null ? 0
         : this.minColumn.length);
   }
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " "
+        + (this.minColumnInclusive ? "[" : "(") + Bytes.toStringBinary(this.minColumn)
+        + ", " + Bytes.toStringBinary(this.maxColumn)
+        + (this.maxColumnInclusive ? "]" : ")");
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index 3207ae4..d395ece 100644
--- src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -23,6 +23,7 @@ package org.apache.hadoop.hbase.filter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+import org.apache.hadoop.hbase.util.Bytes;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -159,4 +160,12 @@ public abstract class CompareFilter extends FilterBase {
     HbaseObjectWritable.writeObject(out, comparator,
       WritableByteArrayComparable.class, null);
   }
+
+  @Override
+  public String toString() {
+    return String.format("%s (%s, %s)",
+        this.getClass().getSimpleName(),
+        this.compareOp.name(),
+        Bytes.toStringBinary(this.comparator.getValue()));
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 46c20e6..6168818 100644
--- src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -241,4 +241,14 @@ public class DependentColumnFilter extends CompareFilter {
     out.writeBoolean(this.dropDependentColumn);
   }
 
+  @Override
+  public String toString() {
+    return String.format("%s (%s, %s, %s, %s, %s)",
+        this.getClass().getSimpleName(),
+        Bytes.toStringBinary(this.columnFamily),
+        Bytes.toStringBinary(this.columnQualifier),
+        this.dropDependentColumn,
+        this.compareOp.name(),
+        Bytes.toStringBinary(this.comparator.getValue()));
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index adb6740..514f9ae 100644
--- src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -143,4 +143,11 @@ public abstract class FilterBase implements Filter {
   public static Filter createFilterFromArguments(ArrayList<byte []> filterArguments) {
     throw new IllegalArgumentException("This method has not been implemented");
   }
+
+  /**
+   * Return filter's info for debugging and logging purposes.
+   */
+  public String toString() {
+    return this.getClass().getSimpleName();
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/FilterList.java src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index e69d85f..3846bb1 100644
--- src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -55,6 +55,7 @@ public class FilterList implements Filter {
   }
 
   private static final Configuration conf = HBaseConfiguration.create();
+  private static final int MAX_LOG_FILTERS = 5;
   private Operator operator = Operator.MUST_PASS_ALL;
   private List<Filter> filters = new ArrayList<Filter>();
 
@@ -314,4 +315,20 @@ public class FilterList implements Filter {
     }
     return keyHint;
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return toString(MAX_LOG_FILTERS);
+  }
+
+  protected String toString(int maxFilters) {
+    int endIndex = this.filters.size() < maxFilters
+        ? this.filters.size() : maxFilters;
+    return String.format("%s %s (%d/%d): %s",
+        this.getClass().getSimpleName(),
+        this.operator == Operator.MUST_PASS_ALL ? "AND" : "OR",
+        endIndex,
+        this.filters.size(),
+        this.filters.subList(0, endIndex).toString());
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 98d17ba..ddb5bd5 100644
--- src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -93,4 +93,9 @@ public class InclusiveStopFilter extends FilterBase {
   public void readFields(DataInput in) throws IOException {
     this.stopRowKey = Bytes.readByteArray(in);
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.stopRowKey);
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 323894f..150bb57 100644
--- src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -40,6 +40,7 @@ import java.util.ArrayList;
 public class MultipleColumnPrefixFilter extends FilterBase {
   protected byte [] hint = null;
   protected TreeSet<byte []> sortedPrefixes = createTreeSet();
+  private final static int MAX_LOG_PREFIXES = 5;
 
   public MultipleColumnPrefixFilter() {
     super();
@@ -140,4 +141,28 @@ public class MultipleColumnPrefixFilter extends FilterBase {
       }
     });
   }
+
+  @Override
+  public String toString() {
+    return toString(MAX_LOG_PREFIXES);
+  }
+
+  protected String toString(int maxPrefixes) {
+    StringBuilder prefixes = new StringBuilder();
+
+    int count = 0;
+    for (byte[] ba : this.sortedPrefixes) {
+      if (count >= maxPrefixes) {
+        break;
+      }
+      ++count;
+      prefixes.append(Bytes.toStringBinary(ba));
+      if (count < this.sortedPrefixes.size() && count < maxPrefixes) {
+        prefixes.append(", ");
+      }
+    }
+
+    return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(),
+        count, this.sortedPrefixes.size(), prefixes.toString());
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index f663717..dffc00e 100644
--- src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -92,4 +92,9 @@ public class PageFilter extends FilterBase {
   public void write(final DataOutput out) throws IOException {
     out.writeLong(pageSize);
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + this.pageSize;
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 2b4506e..fb68d67 100644
--- src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -88,4 +88,9 @@ public class PrefixFilter extends FilterBase {
   public void readFields(DataInput in) throws IOException {
     this.prefix = Bytes.readByteArray(in);
   }
-}
\ No newline at end of file
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix);
+  }
+}
diff --git src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 0a24ada..3f6de40 100644
--- src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -310,4 +310,12 @@ public class SingleColumnValueFilter extends FilterBase {
     out.writeBoolean(filterIfMissing);
     out.writeBoolean(latestVersionOnly);
   }
+
+  @Override
+  public String toString() {
+    return String.format("%s (%s, %s, %s, %s)",
+        this.getClass().getSimpleName(), Bytes.toStringBinary(this.columnFamily),
+        Bytes.toStringBinary(this.columnQualifier), this.compareOp.name(),
+        Bytes.toStringBinary(this.comparator.getValue()));
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index c895061..29aa46e 100644
--- src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -107,4 +107,9 @@ public class SkipFilter extends FilterBase {
       throw new RuntimeException("Failed deserialize.", e);
     }
   }
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + this.filter.toString();
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index 84eee0d..34c2349 100644
--- src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -44,6 +44,7 @@ import com.google.common.base.Preconditions;
 public class TimestampsFilter extends FilterBase {
 
   TreeSet<Long> timestamps;
+  private static final int MAX_LOG_TIMESTAMPS = 5;
 
   // Used during scans to hint the scan to stop early
   // once the timestamps fall below the minTimeStamp.
@@ -133,4 +134,28 @@ public class TimestampsFilter extends FilterBase {
       out.writeLong(timestamp);
     }
   }
+
+  @Override
+  public String toString() {
+    return toString(MAX_LOG_TIMESTAMPS);
+  }
+
+  protected String toString(int maxTimestamps) {
+    StringBuilder tsList = new StringBuilder();
+
+    int count = 0;
+    for (Long ts : this.timestamps) {
+      if (count >= maxTimestamps) {
+        break;
+      }
+      ++count;
+      tsList.append(ts.toString());
+      if (count < this.timestamps.size() && count < maxTimestamps) {
+        tsList.append(", ");
+      }
+    }
+
+    return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(),
+        count, this.timestamps.size(), tsList.toString());
+  }
 }
diff --git src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index eb3f96b..e64ad22 100644
--- src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -108,4 +108,9 @@ public class WhileMatchFilter extends FilterBase {
       throw new RuntimeException("Failed deserialize.", e);
     }
   }
+
+  @Override
+  public String toString() {
+    return this.getClass().getSimpleName() + " " + this.filter.toString();
+  }
 }
diff --git src/test/java/org/apache/hadoop/hbase/client/TestOperation.java src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 716022d..43f856f 100644
--- src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -26,10 +26,36 @@
 import org.apache.hadoop.hbase.SmallTests;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
+import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
+import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.DependentColumnFilter;
+import org.apache.hadoop.hbase.filter.FamilyFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
+import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
+import org.apache.hadoop.hbase.filter.SkipFilter;
+import org.apache.hadoop.hbase.filter.TimestampsFilter;
+import org.apache.hadoop.hbase.filter.ValueFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -48,6 +74,220 @@ public class TestOperation {
 
   private static ObjectMapper mapper = new ObjectMapper();
 
+  private static List<Long> TS_LIST = Arrays.asList(2L, 3L, 5L);
+  private static TimestampsFilter TS_FILTER = new TimestampsFilter(TS_LIST);
+  private static String STR_TS_FILTER =
+      TS_FILTER.getClass().getSimpleName() + " (3/3): [2, 3, 5]";
+
+  private static List<Long> L_TS_LIST =
+      Arrays.asList(0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
+  private static TimestampsFilter L_TS_FILTER =
+      new TimestampsFilter(L_TS_LIST);
+  private static String STR_L_TS_FILTER =
+      L_TS_FILTER.getClass().getSimpleName() + " (5/11): [0, 1, 2, 3, 4]";
+
+  private static String COL_NAME_1 = "col1";
+  private static ColumnPrefixFilter COL_PRE_FILTER =
+      new ColumnPrefixFilter(COL_NAME_1.getBytes());
+  private static String STR_COL_PRE_FILTER =
+      COL_PRE_FILTER.getClass().getSimpleName() + " " + COL_NAME_1;
+
+  private static String COL_NAME_2 = "col2";
+  private static ColumnRangeFilter CR_FILTER = new ColumnRangeFilter(
+      COL_NAME_1.getBytes(), true, COL_NAME_2.getBytes(), false);
+  private static String STR_CR_FILTER = CR_FILTER.getClass().getSimpleName()
+      + " [" + COL_NAME_1 + ", " + COL_NAME_2 + ")";
+
+  private static int COL_COUNT = 9;
+  private static ColumnCountGetFilter CCG_FILTER =
+      new ColumnCountGetFilter(COL_COUNT);
+  private static String STR_CCG_FILTER =
+      CCG_FILTER.getClass().getSimpleName() + " " + COL_COUNT;
+
+  private static int LIMIT = 3;
+  private static int OFFSET = 4;
+  private static ColumnPaginationFilter CP_FILTER =
+      new ColumnPaginationFilter(LIMIT, OFFSET);
+  private static String STR_CP_FILTER = CP_FILTER.getClass().getSimpleName()
+      + " (" + LIMIT + ", " + OFFSET + ")";
+
+  private static String STOP_ROW_KEY = "stop";
+  private static InclusiveStopFilter IS_FILTER =
+      new InclusiveStopFilter(STOP_ROW_KEY.getBytes());
+  private static String STR_IS_FILTER =
+      IS_FILTER.getClass().getSimpleName() + " " + STOP_ROW_KEY;
+
+  private static String PREFIX = "prefix";
+  private static PrefixFilter PREFIX_FILTER =
+      new PrefixFilter(PREFIX.getBytes());
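+  // Note: unlike the other expected strings below, this one hardcodes the
+  // "PrefixFilter" class name instead of deriving it from
+  // getClass().getSimpleName(); both must match PrefixFilter#toString().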
+  private static String STR_PREFIX_FILTER = "PrefixFilter " + PREFIX;
+
+  private static byte[][] PREFIXES = {
+      "0".getBytes(), "1".getBytes(), "2".getBytes()};
+  private static MultipleColumnPrefixFilter MCP_FILTER =
+      new MultipleColumnPrefixFilter(PREFIXES);
+  private static String STR_MCP_FILTER =
+      MCP_FILTER.getClass().getSimpleName() + " (3/3): [0, 1, 2]";
+
+  private static byte[][] L_PREFIXES = {
+      "0".getBytes(), "1".getBytes(), "2".getBytes(), "3".getBytes(),
+      "4".getBytes(), "5".getBytes(), "6".getBytes(), "7".getBytes()};
+  private static MultipleColumnPrefixFilter L_MCP_FILTER =
+      new MultipleColumnPrefixFilter(L_PREFIXES);
+  private static String STR_L_MCP_FILTER =
+      L_MCP_FILTER.getClass().getSimpleName() + " (5/8): [0, 1, 2, 3, 4]";
+
+  private static int PAGE_SIZE = 9;
+  private static PageFilter PAGE_FILTER = new PageFilter(PAGE_SIZE);
+  private static String STR_PAGE_FILTER =
+      PAGE_FILTER.getClass().getSimpleName() + " " + PAGE_SIZE;
+
+  private static SkipFilter SKIP_FILTER = new SkipFilter(L_TS_FILTER);
+  private static String STR_SKIP_FILTER =
+      SKIP_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
+
+  private static WhileMatchFilter WHILE_FILTER =
+      new WhileMatchFilter(L_TS_FILTER);
+  private static String STR_WHILE_FILTER =
+      WHILE_FILTER.getClass().getSimpleName() + " " + STR_L_TS_FILTER;
+
+  private static KeyOnlyFilter KEY_ONLY_FILTER = new KeyOnlyFilter();
+  private static String STR_KEY_ONLY_FILTER =
+      KEY_ONLY_FILTER.getClass().getSimpleName();
+
+  private static FirstKeyOnlyFilter FIRST_KEY_ONLY_FILTER =
+      new FirstKeyOnlyFilter();
+  private static String STR_FIRST_KEY_ONLY_FILTER =
+      FIRST_KEY_ONLY_FILTER.getClass().getSimpleName();
+
+  private static CompareOp CMP_OP = CompareOp.EQUAL;
+  private static byte[] CMP_VALUE = "value".getBytes();
+  private static BinaryComparator BC = new BinaryComparator(CMP_VALUE);
+  private static DependentColumnFilter DC_FILTER =
+      new DependentColumnFilter(FAMILY, QUALIFIER, true, CMP_OP, BC);
+  private static String STR_DC_FILTER = String.format(
+      "%s (%s, %s, %s, %s, %s)", DC_FILTER.getClass().getSimpleName(),
+      Bytes.toStringBinary(FAMILY), Bytes.toStringBinary(QUALIFIER), true,
+      CMP_OP.name(), Bytes.toStringBinary(BC.getValue()));
+
+  private static FamilyFilter FAMILY_FILTER = new FamilyFilter(CMP_OP, BC);
+  private static String STR_FAMILY_FILTER =
+      FAMILY_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+  private static QualifierFilter QUALIFIER_FILTER =
+      new QualifierFilter(CMP_OP, BC);
+  private static String STR_QUALIFIER_FILTER =
+      QUALIFIER_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+  private static RowFilter ROW_FILTER = new RowFilter(CMP_OP, BC);
+  private static String STR_ROW_FILTER =
+      ROW_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+  private static ValueFilter VALUE_FILTER = new ValueFilter(CMP_OP, BC);
+  private static String STR_VALUE_FILTER =
+      VALUE_FILTER.getClass().getSimpleName() + " (EQUAL, value)";
+
+  private static SingleColumnValueFilter SCV_FILTER =
+      new SingleColumnValueFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
+  private static String STR_SCV_FILTER = String.format("%s (%s, %s, %s, %s)",
+      SCV_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
+      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
+      Bytes.toStringBinary(CMP_VALUE));
+
+  private static SingleColumnValueExcludeFilter SCVE_FILTER =
+      new SingleColumnValueExcludeFilter(FAMILY, QUALIFIER, CMP_OP, CMP_VALUE);
+  private static String STR_SCVE_FILTER = String.format("%s (%s, %s, %s, %s)",
+      SCVE_FILTER.getClass().getSimpleName(), Bytes.toStringBinary(FAMILY),
+      Bytes.toStringBinary(QUALIFIER), CMP_OP.name(),
+      Bytes.toStringBinary(CMP_VALUE));
+
+  private static FilterList AND_FILTER_LIST = new FilterList(
+      Operator.MUST_PASS_ALL, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
+          CR_FILTER));
+  private static String STR_AND_FILTER_LIST = String.format(
+      "%s AND (3/3): [%s, %s, %s]", AND_FILTER_LIST.getClass().getSimpleName(),
+      STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
+
+  private static FilterList OR_FILTER_LIST = new FilterList(
+      Operator.MUST_PASS_ONE, Arrays.asList((Filter) TS_FILTER, L_TS_FILTER,
+          CR_FILTER));
+  private static String STR_OR_FILTER_LIST = String.format(
+      "%s OR (3/3): [%s, %s, %s]", OR_FILTER_LIST.getClass().getSimpleName(),
+      STR_TS_FILTER, STR_L_TS_FILTER, STR_CR_FILTER);
+
+  private static FilterList L_FILTER_LIST = new FilterList(
+      Arrays.asList((Filter) TS_FILTER, L_TS_FILTER, CR_FILTER, COL_PRE_FILTER,
+          CCG_FILTER, CP_FILTER, PREFIX_FILTER, PAGE_FILTER));
+  private static String STR_L_FILTER_LIST = String.format(
+      "%s AND (5/8): [%s, %s, %s, %s, %s]",
+      L_FILTER_LIST.getClass().getSimpleName(), STR_TS_FILTER, STR_L_TS_FILTER,
+      STR_CR_FILTER, STR_COL_PRE_FILTER, STR_CCG_FILTER);
+
+  private static Filter[] FILTERS = {
+      TS_FILTER,                 // TimestampsFilter
+      L_TS_FILTER,               // TimestampsFilter
+      COL_PRE_FILTER,            // ColumnPrefixFilter
+      CP_FILTER,                 // ColumnPaginationFilter
+      CR_FILTER,                 // ColumnRangeFilter
+      CCG_FILTER,                // ColumnCountGetFilter
+      IS_FILTER,                 // InclusiveStopFilter
+      PREFIX_FILTER,             // PrefixFilter
+      PAGE_FILTER,               // PageFilter
+      SKIP_FILTER,               // SkipFilter
+      WHILE_FILTER,              // WhileMatchFilter
+      KEY_ONLY_FILTER,           // KeyOnlyFilter
+      FIRST_KEY_ONLY_FILTER,     // FirstKeyOnlyFilter
+      MCP_FILTER,                // MultipleColumnPrefixFilter
+      L_MCP_FILTER,              // MultipleColumnPrefixFilter
+      DC_FILTER,                 // DependentColumnFilter
+      FAMILY_FILTER,             // FamilyFilter
+      QUALIFIER_FILTER,          // QualifierFilter
+      ROW_FILTER,                // RowFilter
+      VALUE_FILTER,              // ValueFilter
+      SCV_FILTER,                // SingleColumnValueFilter
+      SCVE_FILTER,               // SingleColumnValueExcludeFilter
+      AND_FILTER_LIST,           // FilterList
+      OR_FILTER_LIST,            // FilterList
+      L_FILTER_LIST,             // FilterList
+  };
+
+  private static String[] FILTERS_INFO = {
+      STR_TS_FILTER,             // TimestampsFilter
+      STR_L_TS_FILTER,           // TimestampsFilter
+      STR_COL_PRE_FILTER,        // ColumnPrefixFilter
+      STR_CP_FILTER,             // ColumnPaginationFilter
+      STR_CR_FILTER,             // ColumnRangeFilter
+      STR_CCG_FILTER,            // ColumnCountGetFilter
+      STR_IS_FILTER,             // InclusiveStopFilter
+      STR_PREFIX_FILTER,         // PrefixFilter
+      STR_PAGE_FILTER,           // PageFilter
+      STR_SKIP_FILTER,           // SkipFilter
+      STR_WHILE_FILTER,          // WhileMatchFilter
+      STR_KEY_ONLY_FILTER,       // KeyOnlyFilter
+      STR_FIRST_KEY_ONLY_FILTER, // FirstKeyOnlyFilter
+      STR_MCP_FILTER,            // MultipleColumnPrefixFilter
+      STR_L_MCP_FILTER,          // MultipleColumnPrefixFilter
+      STR_DC_FILTER,             // DependentColumnFilter
+      STR_FAMILY_FILTER,         // FamilyFilter
+      STR_QUALIFIER_FILTER,      // QualifierFilter
+      STR_ROW_FILTER,            // RowFilter
+      STR_VALUE_FILTER,          // ValueFilter
+      STR_SCV_FILTER,            // SingleColumnValueFilter
+      STR_SCVE_FILTER,           // SingleColumnValueExcludeFilter
+      STR_AND_FILTER_LIST,       // FilterList
+      STR_OR_FILTER_LIST,        // FilterList
+      STR_L_FILTER_LIST,         // FilterList
+  };
+
+  static {
+    assertEquals(String.format("The sizes of static arrays do not match: "
+        + "[FILTERS: %d <=> FILTERS_INFO: %d]", FILTERS.length,
+        FILTERS_INFO.length), FILTERS.length, FILTERS_INFO.length);
+  }
+
   /**
    * Test the client Operations' JSON encoding to ensure that produced JSON is
    * parseable and that the details are present and not corrupted.
-- 
1.7.4.4
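
For illustration only (not part of the patch): a minimal sketch of how the new filter descriptions surface to clients, assuming the 0.92-era API patched above. The class name FilterLogDemo and the sample values are hypothetical; Scan#setFilter, FilterList, TimestampsFilter, PrefixFilter, and the toMap()/toJSON() operation summary are the pieces the diff and its test already exercise, and that summary is what the slow query log is intended to pick up.

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.filter.TimestampsFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilterLogDemo {  // hypothetical demo class, not in the patch
      public static void main(String[] args) throws IOException {
        Scan scan = new Scan();
        scan.setFilter(new FilterList(Arrays.asList((Filter)
            new PrefixFilter(Bytes.toBytes("user42")),
            new TimestampsFilter(Arrays.asList(2L, 3L, 5L)))));
        // With this patch, the scan's map/JSON form gains a "filter" entry
        // built from Filter#toString(), e.g.:
        //   FilterList AND (2/2): [PrefixFilter user42,
        //                          TimestampsFilter (3/3): [2, 3, 5]]
        System.out.println(scan.toJSON());
      }
    }

Note the truncation convention in the output: the list-valued filters print at most five elements as a (shown/total) pair (MAX_LOG_FILTERS, MAX_LOG_PREFIXES, MAX_LOG_TIMESTAMPS above), which keeps slow-query log lines bounded even for large filter lists.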