+ * Scan scan = new Scan();
+ * // Set the scan properties (range, filters, etc.).
+ * HTable ht = ...;
+ * long noOfDeletedRows = 0;
+ * Batch.Call<BulkDeleteProtocol, BulkDeleteResponse> callable =
+ *     new Batch.Call<BulkDeleteProtocol, BulkDeleteResponse>() {
+ *   public BulkDeleteResponse call(BulkDeleteProtocol instance) throws IOException {
+ *     return instance.delete(scan);
+ *   }
+ * };
+ * Map<byte[], BulkDeleteResponse> result = ht.coprocessorExec(BulkDeleteProtocol.class,
+ *     scan.getStartRow(), scan.getStopRow(), callable);
+ * for (BulkDeleteResponse response : result.values()) {
+ *   noOfDeletedRows += response.getRowsDeleted();
+ * }
+ *
+ */
+public interface BulkDeleteProtocol extends CoprocessorProtocol {
+ /**
+ * Deletes the rows in the region that are selected by the given scan.
+ *
+ * @param scan the scan specifying the rows to be deleted
+ * @return a {@link BulkDeleteResponse} carrying the number of rows deleted in this region and
+ *         any exception raised while performing the deletion
+ */
+ BulkDeleteResponse delete(Scan scan);
+}
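
Before the usage shown in the Javadoc above can work, the endpoint has to be loaded on the
region servers. The test further down loads it cluster-wide through
CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY; as an alternative, here is a minimal
per-table registration sketch. It assumes the 0.92/0.94-era HBaseAdmin/HTableDescriptor
client API, and the table and family names are illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.coprocessor.example.BulkDeleteEndpoint;
import org.apache.hadoop.hbase.util.Bytes;

public class BulkDeleteTableSetupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);
    // Table and family names below are illustrative only.
    HTableDescriptor htd = new HTableDescriptor(Bytes.toBytes("myTable"));
    htd.addFamily(new HColumnDescriptor(Bytes.toBytes("cf1")));
    // Attach the example endpoint to this table instead of loading it cluster-wide.
    htd.addCoprocessor(BulkDeleteEndpoint.class.getName());
    admin.createTable(htd);
    admin.close();
  }
}
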
Index: src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteResponse.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteResponse.java (revision 0)
+++ src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteResponse.java (revision 0)
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor.example;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+/**
+ * Wrapper class carrying the result of a bulk delete operation performed at the server for a
+ * region. It holds the total number of rows deleted and any {@link IOException} that occurred
+ * while performing the operation.
+ */
+public class BulkDeleteResponse implements Serializable {
+ private static final long serialVersionUID = -8192337710525997237L;
+ private Long rowsDeleted;
+ private IOException ioException;
+
+ public BulkDeleteResponse() {
+
+ }
+
+ public void setRowsDeleted(Long rowsDeleted) {
+ this.rowsDeleted = rowsDeleted;
+ }
+
+ public Long getRowsDeleted() {
+ return rowsDeleted;
+ }
+
+ public void setIoException(IOException ioException) {
+ this.ioException = ioException;
+ }
+
+ public IOException getIoException() {
+ return ioException;
+ }
+}
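
Because each region returns its own BulkDeleteResponse, a caller normally checks
getIoException() for every region before trusting the aggregated count. The following is a
minimal client-side sketch, assuming the HTable.coprocessorExec() call shown in the
BulkDeleteProtocol Javadoc; the table and family names are illustrative only.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.example.BulkDeleteProtocol;
import org.apache.hadoop.hbase.coprocessor.example.BulkDeleteResponse;
import org.apache.hadoop.hbase.util.Bytes;

public class BulkDeleteClientSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "myTable"); // illustrative table name
    try {
      final Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("cf1")); // illustrative family; narrows what the endpoint scans
      Map<byte[], BulkDeleteResponse> results = table.coprocessorExec(
          BulkDeleteProtocol.class, scan.getStartRow(), scan.getStopRow(),
          new Batch.Call<BulkDeleteProtocol, BulkDeleteResponse>() {
            public BulkDeleteResponse call(BulkDeleteProtocol instance) throws IOException {
              return instance.delete(scan);
            }
          });
      long deleted = 0;
      for (BulkDeleteResponse response : results.values()) {
        if (response.getIoException() != null) {
          // A region failed; counts gathered from other regions may be partial.
          throw response.getIoException();
        }
        if (response.getRowsDeleted() != null) {
          deleted += response.getRowsDeleted();
        }
      }
      System.out.println("Rows deleted across regions: " + deleted);
    } finally {
      table.close();
    }
  }
}
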
Index: src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
===================================================================
--- src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java (revision 0)
+++ src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java (revision 0)
@@ -0,0 +1,313 @@
+/*
+ * Copyright 2011 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor.example;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.example.BulkDeleteEndpoint.DeleteType;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestBulkDeleteProtocol {
+ private static final byte[] FAMILY1 = Bytes.toBytes("cf1");
+ private static final byte[] FAMILY2 = Bytes.toBytes("cf2");
+ private static final byte[] QUALIFIER1 = Bytes.toBytes("c1");
+ private static final byte[] QUALIFIER2 = Bytes.toBytes("c2");
+ private static final byte[] QUALIFIER3 = Bytes.toBytes("c3");
+ private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+ @BeforeClass
+ public static void setupBeforeClass() throws Exception {
+ TEST_UTIL.getConfiguration().set(CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY,
+ BulkDeleteEndpoint.class.getName());
+ TEST_UTIL.startMiniCluster(2);
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ @Test
+ public void testBulkDeleteEndpoint() throws Throwable {
+ byte[] tableName = Bytes.toBytes("testBulkDeleteEndpoint");
+ HTable ht = createTable(tableName);
+ List