diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BaseDelegateFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BaseDelegateFilter.java
new file mode 100644
index 0000000..658d751
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BaseDelegateFilter.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+
+/**
+ * Convenience base class for a Filter that simply wraps and delegates to another Filter.
+ */
+@InterfaceAudience.Private
+public class BaseDelegateFilter extends Filter {
+ Filter filter = null;
+
+ public BaseDelegateFilter( Filter filter ) {
+ if (null == filter) {
+ // ensure the filter instance is not null
+ throw new NullPointerException("Cannot create BaseDelegateFilter with null Filter");
+ }
+ this.filter = filter;
+ }
+
+ /**
+ * @return The filter serialized using pb
+ */
+ public byte[] toByteArray() throws IOException {
+ FilterProtos.FilterWrapper.Builder builder =
+ FilterProtos.FilterWrapper.newBuilder();
+ builder.setFilter(ProtobufUtil.toFilter(this.filter));
+ return builder.build().toByteArray();
+ }
+
+ /**
+ * @param pbBytes A pb serialized {@link FilterWrapper} instance
+ * @return An instance of {@link FilterWrapper} made from bytes
+ * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @see #toByteArray
+ */
+ public static FilterWrapper parseFrom(final byte [] pbBytes)
+ throws DeserializationException {
+ FilterProtos.FilterWrapper proto;
+ try {
+ proto = FilterProtos.FilterWrapper.parseFrom(pbBytes);
+ } catch (InvalidProtocolBufferException e) {
+ throw new DeserializationException(e);
+ }
+ try {
+ return new FilterWrapper(ProtobufUtil.toFilter(proto.getFilter()));
+ } catch (IOException ioe) {
+ throw new DeserializationException(ioe);
+ }
+ }
+
+ @Override
+ public void reset() throws IOException {
+ this.filter.reset();
+ }
+
+ @Override
+ public boolean filterAllRemaining() throws IOException {
+ return this.filter.filterAllRemaining();
+ }
+
+ @Override
+ public boolean filterRow() throws IOException {
+ return this.filter.filterRow();
+ }
+
+ /**
+ * This method is deprecated and you should override {@link #getNextCellHint(Cell)} instead.
+ */
+ @Override
+ @Deprecated
+ public KeyValue getNextKeyHint(KeyValue currentKV) throws IOException {
+ return KeyValueUtil.ensureKeyValue(this.filter.getNextCellHint((Cell)currentKV));
+ }
+
+ /**
+ * Old filter wrapper descendants will implement KV getNextKeyHint(KV) so we should call it.
+ */
+ @Override
+ public Cell getNextCellHint(Cell currentKV) throws IOException {
+ // Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
+ // Thus to maintain compatibility we need to call the old version.
+ return this.getNextKeyHint(KeyValueUtil.ensureKeyValue(currentKV));
+ }
+
+ @Override
+ public boolean filterRowKey(byte[] buffer, int offset, int length) throws IOException {
+ return this.filter.filterRowKey(buffer, offset, length);
+ }
+
+ @Override
+ public ReturnCode filterKeyValue(Cell v) throws IOException {
+ return this.filter.filterKeyValue(v);
+ }
+
+ @Override
+ public Cell transformCell(Cell v) throws IOException {
+ // Old filters based off of this class will override KeyValue transform(KeyValue).
+ // Thus to maintain compatibility we need to call the old version.
+ return transform(KeyValueUtil.ensureKeyValue(v));
+ }
+
+ /**
+ * WARNING: please do not override this method. Instead override {@link #transformCell(Cell)}.
+ *
+ * This is for transition from 0.94 -> 0.96
+ */
+ @Override
+ @Deprecated
+ public KeyValue transform(KeyValue currentKV) throws IOException {
+ return KeyValueUtil.ensureKeyValue(this.filter.transformCell(currentKV));
+ }
+
+ @Override
+ public boolean hasFilterRow() {
+ return this.filter.hasFilterRow();
+ }
+
+ @Override
+ public void filterRowCells(List<Cell> kvs) throws IOException {
+ this.filter.filterRowCells(kvs);
+ }
+
+ /**
+ * WARNING: please do not override this method. Instead override {@link #filterRowCells(List)}.
+ *
+ * This is for transition from 0.94 -> 0.96
+ */
+ @Override
+ @Deprecated
+ public void filterRow(List<KeyValue> kvs) throws IOException {
+ // The old filterRow(List) API is marked @InterfaceAudience.Private and is never
+ // called internally, so we can get away with not implementing it.
+ throw new UnsupportedOperationException("filterRow(List) should never be called");
+ }
+
+ @Override
+ public boolean isFamilyEssential(byte[] name) throws IOException {
+ return filter.isFamilyEssential(name);
+ }
+
+ /**
+ * @param o the filter to compare against
+ * @return true if and only if the fields of the filter that are serialized
+ * are equal to the corresponding fields in o. Used for testing.
+ */
+ boolean areSerializedFieldsEqual(Filter o) {
+ if (o == this) return true;
+ if (!(o instanceof BaseDelegateFilter)) return false;
+
+ BaseDelegateFilter other = (BaseDelegateFilter)o;
+ return this.filter.areSerializedFieldsEqual(other.filter);
+ }
+}
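
Reviewer note: a minimal sketch, not part of this patch, of how a delegating subclass of the new BaseDelegateFilter might look. The class name CountingFilter and its counter are hypothetical; everything it calls is defined in the base class above.

package org.apache.hadoop.hbase.filter;

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;

/**
 * Hypothetical example: counts the Cells presented to the wrapped Filter,
 * delegating all other behavior to BaseDelegateFilter.
 */
public class CountingFilter extends BaseDelegateFilter {
  private long cellsSeen = 0;

  public CountingFilter(Filter filter) {
    super(filter);
  }

  @Override
  public ReturnCode filterKeyValue(Cell v) throws IOException {
    cellsSeen++; // observe the Cell, then defer to the wrapped filter
    return super.filterKeyValue(v);
  }

  public long getCellsSeen() {
    return cellsSeen;
  }
}
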
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 1e45464..c4ff2c5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -20,19 +20,10 @@
package org.apache.hadoop.hbase.filter;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
-
-import com.google.protobuf.InvalidProtocolBufferException;
/**
* This is a Filter wrapper class which is used in the server side. Some filter
@@ -42,113 +33,9 @@ import com.google.protobuf.InvalidProtocolBufferException;
*
*/
@InterfaceAudience.Private
-final public class FilterWrapper extends Filter {
- Filter filter = null;
-
+final public class FilterWrapper extends BaseDelegateFilter {
public FilterWrapper( Filter filter ) {
- if (null == filter) {
- // ensure the filter instance is not null
- throw new NullPointerException("Cannot create FilterWrapper with null Filter");
- }
- this.filter = filter;
- }
-
- /**
- * @return The filter serialized using pb
- */
- public byte[] toByteArray() throws IOException {
- FilterProtos.FilterWrapper.Builder builder =
- FilterProtos.FilterWrapper.newBuilder();
- builder.setFilter(ProtobufUtil.toFilter(this.filter));
- return builder.build().toByteArray();
- }
-
- /**
- * @param pbBytes A pb serialized {@link FilterWrapper} instance
- * @return An instance of {@link FilterWrapper} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
- * @see #toByteArray
- */
- public static FilterWrapper parseFrom(final byte [] pbBytes)
- throws DeserializationException {
- FilterProtos.FilterWrapper proto;
- try {
- proto = FilterProtos.FilterWrapper.parseFrom(pbBytes);
- } catch (InvalidProtocolBufferException e) {
- throw new DeserializationException(e);
- }
- try {
- return new FilterWrapper(ProtobufUtil.toFilter(proto.getFilter()));
- } catch (IOException ioe) {
- throw new DeserializationException(ioe);
- }
- }
-
- @Override
- public void reset() throws IOException {
- this.filter.reset();
- }
-
- @Override
- public boolean filterAllRemaining() throws IOException {
- return this.filter.filterAllRemaining();
- }
-
- @Override
- public boolean filterRow() throws IOException {
- return this.filter.filterRow();
- }
-
- /**
- * This method is deprecated and you should override Cell getNextKeyHint(Cell) instead.
- */
- @Override
- @Deprecated
- public KeyValue getNextKeyHint(KeyValue currentKV) throws IOException {
- return KeyValueUtil.ensureKeyValue(this.filter.getNextCellHint((Cell)currentKV));
- }
-
- /**
- * Old filter wrapper descendants will implement KV getNextKeyHint(KV) so we should call it.
- */
- @Override
- public Cell getNextCellHint(Cell currentKV) throws IOException {
- // Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
- // Thus to maintain compatibility we need to call the old version.
- return this.getNextKeyHint(KeyValueUtil.ensureKeyValue(currentKV));
- }
-
- @Override
- public boolean filterRowKey(byte[] buffer, int offset, int length) throws IOException {
- return this.filter.filterRowKey(buffer, offset, length);
- }
-
- @Override
- public ReturnCode filterKeyValue(Cell v) throws IOException {
- return this.filter.filterKeyValue(v);
- }
-
- @Override
- public Cell transformCell(Cell v) throws IOException {
- // Old filters based off of this class will override KeyValue transform(KeyValue).
- // Thus to maintain compatibility we need to call the old version.
- return transform(KeyValueUtil.ensureKeyValue(v));
- }
-
- /**
- * WARNING: please to not override this method. Instead override {@link #transformCell(Cell)}.
- *
- * This is for transition from 0.94 -> 0.96
- */
- @Override
- @Deprecated
- public KeyValue transform(KeyValue currentKV) throws IOException {
- return KeyValueUtil.ensureKeyValue(this.filter.transformCell(currentKV));
- }
-
- @Override
- public boolean hasFilterRow() {
- return this.filter.hasFilterRow();
+ super(filter);
}
@Override
@@ -177,35 +64,4 @@ final public class FilterWrapper extends Filter {
}
return FilterRowRetCode.NOT_CALLED;
}
-
- /**
- * WARNING: please to not override this method. Instead override {@link #transformCell(Cell)}.
- *
- * This is for transition from 0.94 -> 0.96
- */
- @Override
- @Deprecated
- public void filterRow(List kvs) throws IOException {
- // This is only used internally, marked InterfaceAudience.private, and not used anywhere.
- // We can get away with not implementing this.
- throw new UnsupportedOperationException("filterRow(List) should never be called");
- }
-
- @Override
- public boolean isFamilyEssential(byte[] name) throws IOException {
- return filter.isFamilyEssential(name);
- }
-
- /**
- * @param other
- * @return true if and only if the fields of the filter that are serialized
- * are equal to the corresponding fields in other. Used for testing.
- */
- boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof FilterWrapper)) return false;
-
- FilterWrapper other = (FilterWrapper)o;
- return this.filter.areSerializedFieldsEqual(other.filter);
- }
}
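
Sanity sketch, not part of this patch: the refactor should be behavior-preserving, since FilterWrapper now inherits toByteArray(), parseFrom(), and areSerializedFieldsEqual() from BaseDelegateFilter. Something like the following (class name illustrative; it must live in the filter package to reach the package-private equality check) would confirm the protobuf round trip still works:

package org.apache.hadoop.hbase.filter;

import java.io.IOException;

import org.apache.hadoop.hbase.exceptions.DeserializationException;

// Illustrative check that serialization round-trips through the inherited methods.
public class FilterWrapperRoundTrip {
  public static boolean roundTrips() throws IOException, DeserializationException {
    FilterWrapper wrapper = new FilterWrapper(new FirstKeyOnlyFilter());
    // toByteArray() and parseFrom() are now inherited from BaseDelegateFilter.
    FilterWrapper reparsed = FilterWrapper.parseFrom(wrapper.toByteArray());
    return wrapper.areSerializedFieldsEqual(reparsed);
  }
}
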
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PreDeleteFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PreDeleteFilter.java
new file mode 100644
index 0000000..64f551a
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PreDeleteFilter.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * A special Filter that sees _all_ Cells in {@link #filterKeyValue(org.apache.hadoop.hbase.Cell)}.
+ * These include deleted Cells as well as Cells for columns not selected by the Scan or Get.
+ * This allows a caller to override the logic in ScanQueryMatcher by implementing a Filter
+ * and wrapping it in this class.
+ */
+@InterfaceAudience.Public
+public class PreDeleteFilter extends BaseDelegateFilter {
+ public PreDeleteFilter( Filter filter ) {
+ super(filter);
+ }
+}
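
Usage sketch, not part of this patch; the wrapped filter and all names are illustrative. Wrapping any Filter in PreDeleteFilter routes it through the new pre-delete hook added to ScanQueryMatcher below:

package org.apache.hadoop.hbase.filter;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical usage: the wrapped ValueFilter now sees delete markers and
// Cells from unselected columns in its filterKeyValue(Cell) callback.
public class PreDeleteFilterExample {
  public static Scan buildScan() {
    Scan scan = new Scan();
    scan.setFilter(new PreDeleteFilter(
        new ValueFilter(CompareFilter.CompareOp.EQUAL,
            new BinaryComparator(Bytes.toBytes("v1")))));
    return scan;
  }
}
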
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index 24382c2..a5d5fcf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
+import org.apache.hadoop.hbase.filter.PreDeleteFilter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.regionserver.DeleteTracker.DeleteResult;
import org.apache.hadoop.hbase.util.Bytes;
@@ -336,6 +337,26 @@ public class ScanQueryMatcher {
return MatchCode.SKIP;
}
+ ReturnCode filterResponse = ReturnCode.SKIP;
+ if (filter != null && filter instanceof PreDeleteFilter) {
+ // give this filter a chance to run before deletes (and columns) are filtered
+ filterResponse = filter.filterKeyValue(kv);
+ switch (filterResponse) {
+ case SKIP:
+ return MatchCode.SKIP;
+ case NEXT_COL:
+ return columns.getNextRowOrNextColumn(bytes, offset, qualLength);
+ case NEXT_ROW:
+ stickyNextRow = true;
+ return MatchCode.SEEK_NEXT_ROW;
+ case SEEK_NEXT_USING_HINT:
+ return MatchCode.SEEK_NEXT_USING_HINT;
+ default:
+ // INCLUDE or INCLUDE_AND_NEXT_COL: fall through to the delete and column checks below
+ break;
+ }
+ }
+
/*
* The delete logic is pretty complicated now.
* This is corroborated by the following:
@@ -430,9 +451,8 @@ public class ScanQueryMatcher {
// STEP 1: Check if the column is part of the requested columns
MatchCode colChecker = columns.checkColumn(bytes, offset, qualLength, type);
if (colChecker == MatchCode.INCLUDE) {
- ReturnCode filterResponse = ReturnCode.SKIP;
// STEP 2: Yes, the column is part of the requested columns. Check if filter is present
- if (filter != null) {
+ if (filter != null && !(filter instanceof PreDeleteFilter)) {
// STEP 3: Filter the key value and return if it filters out
filterResponse = filter.filterKeyValue(kv);
switch (filterResponse) {
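
To illustrate what the new hook enables, here is a hypothetical subclass, not part of this patch, that skips the rest of a row as soon as it sees a delete marker. Since it is an instanceof PreDeleteFilter, ScanQueryMatcher consults it before its delete logic and maps the NEXT_ROW return to SEEK_NEXT_ROW, as in the switch added above. CellUtil.isDelete is assumed to be available in this codebase.

package org.apache.hadoop.hbase.filter;

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

/**
 * Hypothetical example: because this runs as a PreDeleteFilter, its
 * filterKeyValue sees delete markers, so it can abandon a row as soon
 * as any delete marker for that row shows up.
 */
public class SkipOnDeleteMarkerFilter extends PreDeleteFilter {
  public SkipOnDeleteMarkerFilter(Filter filter) {
    super(filter);
  }

  @Override
  public ReturnCode filterKeyValue(Cell v) throws IOException {
    if (CellUtil.isDelete(v)) {
      return ReturnCode.NEXT_ROW; // mapped to MatchCode.SEEK_NEXT_ROW above
    }
    return super.filterKeyValue(v);
  }
}
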