diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index ae73c51..4bef857 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -23,9 +23,11 @@ import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.NavigableMap; import java.util.TreeMap; import java.util.concurrent.Callable; @@ -52,10 +54,13 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; import org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel; +import org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcInvoker; +import org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcInvoker.BatchErrorsEntry; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; @@ -67,6 +72,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Threads; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -1574,4 +1581,103 @@ public class HTable implements HTableInterface { t.close(); } } + + /** + * {@inheritDoc} + */ + @Override + public Map batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable { + final Map results = Collections.synchronizedMap(new TreeMap( + Bytes.BYTES_COMPARATOR)); + batchCoprocessorService(serviceDescriptor, method, message, startKey, endKey, + new Callback() { + + @Override + public void update(byte[] region, byte[] row, R result) { + if (region != null) { + results.put(region, result); + } + } + }, responsePrototype); + return results; + } + + /** + * {@inheritDoc} + */ + @Override + public void batchCoprocessorService( + final Descriptors.ServiceDescriptor serviceDescriptor, final String method, + final Message message, byte[] startKey, byte[] endKey, final Callback callback, + final R responsePrototype) throws ServiceException, Throwable { + + Map> serverRegions = new HashMap>(); + NavigableMap regions = getRegionLocations(); + for (Entry entry : regions.entrySet()) { + HRegionInfo region = entry.getKey(); + boolean include = false; + byte[] start = region.getStartKey(); + byte[] end = region.getEndKey(); + if (Bytes.compareTo(startKey, start) >= 0) { + if (Bytes.equals(end, HConstants.EMPTY_END_ROW) || Bytes.compareTo(startKey, end) < 0) { + include = true; + } + } else if (Bytes.equals(endKey, HConstants.EMPTY_END_ROW) + || Bytes.compareTo(start, endKey) <= 0) { + include = true; + } else { + break; // past stop + } + + if (include) { + ServerName server = entry.getValue(); + Map 
regionRows = serverRegions.get(server); + if (regionRows == null) { + regionRows = new TreeMap(Bytes.BYTES_COMPARATOR); + serverRegions.put(server, regionRows); + } + regionRows.put(region.getRegionName(), region.getStartKey()); + } + } + final String serviceFullName = serviceDescriptor.getFullName(); + Map> futures = new TreeMap>(); + final RegionServerCoprocessorRpcInvoker.BatchErrors errors = new RegionServerCoprocessorRpcInvoker.BatchErrors(); + for (Entry> entry : serverRegions.entrySet()) { + final RegionServerCoprocessorRpcInvoker invoker = new RegionServerCoprocessorRpcInvoker( + connection, tableName, entry.getKey(), entry.getValue()); + final ServerName serverName = entry.getKey(); + Future future = pool.submit(new Callable() { + public Void call() throws Exception { + invoker.callExecMultiService(serviceFullName, method, message, callback, + responsePrototype); + if (invoker.hasErrors()) { + for (BatchErrorsEntry entry : invoker.getErrors()) { + errors.add(entry); + } + } + return null; + } + }); + futures.put(serverName.getHostAndPort(), future); + } + for (Map.Entry> e : futures.entrySet()) { + try { + e.getValue().get(); + } catch (ExecutionException ee) { + LOG.warn( + "Error calling coprocessor service " + serviceFullName + " in the server " + e.getKey(), + ee); + throw ee.getCause(); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new InterruptedIOException("Interrupted calling coprocessor service " + + serviceFullName + " in the server " + e.getKey()).initCause(ie); + } + } + if (errors.hasError()) { + throw errors.makeException(); + } + } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java index ab34d5b..6a7ca1c 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java @@ -18,6 +18,8 @@ */ package org.apache.hadoop.hbase.client; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -585,4 +587,68 @@ public interface HTableInterface extends Closeable { * @throws IOException if a remote or network exception occurs. */ void setWriteBufferSize(long writeBufferSize) throws IOException; + + /** + * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table + * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), all + * the invocations to the same region server will be batched into one call. The coprocessor service is + * invoked according to the service instance, method name and parameters. + * + * @param serviceDescriptor + * the service descriptor + * @param method + * the method name which is executed in the Service. + * @param message + * the parameters + * @param startKey + * start region selection with region containing this row. If {@code null}, the selection + * will start with the first table region. + * @param endKey + * select regions up to and including the region containing this row. If {@code null}, + * selection will continue through the last table region. + * @param responsePrototype + * the proto type of the response of the method in Service. 
+ * @throws ServiceException + * @throws Throwable + * @return a map of result values keyed by region name + */ + @InterfaceAudience.Private + Map batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable; + + /** + * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table + * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), all + * the invocations to the same region server will be batched into one call. The coprocessor service is + * invoked according to the service instance, method name and parameters. + * + *

+ * The given + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} + * method will be called with the return value from each region's invocation. + *
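+   * <p>
+   * Illustrative sketch only (not part of this patch): given an {@code HTableInterface} instance
+   * {@code table} and a hypothetical deployed endpoint whose generated classes are
+   * {@code ExampleProtos.RowCountService}, {@code ExampleProtos.CountRequest} and
+   * {@code ExampleProtos.CountResponse}, a batched, callback-driven call over the whole table
+   * might look like:
+   * </p>
+   * <pre>
+   * ExampleProtos.CountRequest request = ExampleProtos.CountRequest.getDefaultInstance();
+   * table.batchCoprocessorService(ExampleProtos.RowCountService.getDescriptor(), "count", request,
+   *     HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
+   *     new Batch.Callback&lt;ExampleProtos.CountResponse&gt;() {
+   *       public void update(byte[] region, byte[] row, ExampleProtos.CountResponse result) {
+   *         // invoked once per region; invocations for regions on the same server share one RPC
+   *       }
+   *     }, ExampleProtos.CountResponse.getDefaultInstance());
+   * </pre>
+   * <p>
+   * The overload without a callback behaves the same way but collects the per-region responses
+   * into a {@code Map} keyed by region name.
+   * </p>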

+ * + * @param serviceDescriptor + * the service descriptor + * @param method + * the method name which is executed in the Service. + * @param message + * the parameters + * @param startKey + * start region selection with region containing this row. If {@code null}, the selection + * will start with the first table region. + * @param endKey + * select regions up to and including the region containing this row. If {@code null}, + * selection will continue through the last table region. + * @param callback + * @param responsePrototype + * the proto type of the response of the method in Service. + * @throws ServiceException + * @throws Throwable + */ + @InterfaceAudience.Private + void batchCoprocessorService(Descriptors.ServiceDescriptor serviceDescriptor, + String method, Message message, byte[] startKey, byte[] endKey, Batch.Callback callback, + R responsePrototype) throws ServiceException, Throwable; } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java index e0e4e32..f32c6d3 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java @@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.PoolMap; import org.apache.hadoop.hbase.util.PoolMap.PoolType; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -622,5 +624,24 @@ public class HTablePool implements Closeable { byte[] qualifier, long amount, boolean writeToWAL) throws IOException { return table.incrementColumnValue(row, family, qualifier, amount, writeToWAL); } + + @Override + public Map batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable { + checkState(); + return table.batchCoprocessorService(serviceDescriptor, method, message, startKey, endKey, + responsePrototype); + } + + @Override + public void batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, Callback callback, R responsePrototype) + throws ServiceException, Throwable { + checkState(); + table.batchCoprocessorService(serviceDescriptor, method, message, startKey, endKey, callback, + responsePrototype); + } } } diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiRegionServerCallable.java hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiRegionServerCallable.java new file mode 100644 index 0000000..70f3a8c --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiRegionServerCallable.java @@ -0,0 +1,115 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import java.io.IOException; +import java.net.ConnectException; +import java.net.SocketTimeoutException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; + +/** + * Implementations of a batch call to a RegionServer and implement {@link #call()}. + * Passed to a {@link RpcRetryingCaller} so we retry on fail. + * @param the class that the ServerCallable handles + * + * @param + */ +public abstract class MultiRegionServerCallable implements RetryingCallable { + + static final Log LOG = LogFactory.getLog(MultiRegionServerCallable.class); + + protected final HConnection connection; + protected final TableName tableName; + protected final ServerName serverName; + protected ClientService.BlockingInterface stub; + protected final static int MIN_WAIT_DEAD_SERVER = 10000; + + /** + * @param connection + * Connection to use. + * @param tableName + * Table name to which row belongs. + * @param ServerName + * The server name we want to connect. + */ + public MultiRegionServerCallable(HConnection connection, TableName tableName, + ServerName serverName) { + this.connection = connection; + this.tableName = tableName; + this.serverName = serverName; + } + + /** + * Prepare for connection to the server. 
+ * @param reload this is not used in this implementation + * @throws IOException e + */ + @Override + public void prepare(boolean reload) throws IOException { + setStub(connection.getClient(serverName)); + } + + @Override + public void throwable(Throwable t, boolean retrying) { + if (t instanceof SocketTimeoutException || t instanceof ConnectException + || t instanceof RetriesExhaustedException || connection.isDeadServer(serverName)) { + // if thrown these exceptions, we clear all the cache entries that + // map to that slow/dead server; otherwise, let cache miss and ask + // hbase:meta again to find the new location + connection.clearCaches(serverName); + } + } + + @Override + public String getExceptionMessageAdditionalDetail() { + return "table '" + tableName + " in the server " + serverName; + } + + @Override + public long sleep(long pause, int tries) { + // Tries hasn't been bumped up yet so we use "tries + 1" to get right pause time + long sleep = ConnectionUtils.getPauseTime(pause, tries + 1); + if (sleep < MIN_WAIT_DEAD_SERVER && connection.isDeadServer(serverName)) { + sleep = ConnectionUtils.addJitter(MIN_WAIT_DEAD_SERVER, 0.10f); + } + return sleep; + } + + protected ClientService.BlockingInterface getStub() { + return this.stub; + } + + void setStub(final ClientService.BlockingInterface stub) { + this.stub = stub; + } + + public TableName getTableName() { + return this.tableName; + } + + public ServerName getServerName() { + return this.serverName; + } + +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcInvoker.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcInvoker.java new file mode 100644 index 0000000..ad2d367 --- /dev/null +++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcInvoker.java @@ -0,0 +1,236 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.ipc; + +import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.MultiRegionServerCallable; +import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.protobuf.RequestConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.Message; +import com.google.protobuf.ZeroCopyLiteralByteString; + +/** + * Provides clients with an RPC connection to call coprocessor endpoint + * {@link com.google.protobuf.Service}s against a given region server. An instance of this class may + * be obtained by calling org.apache.hadoop.hbase.client.HTable.batchCoprocessorService, + * + */ +public class RegionServerCoprocessorRpcInvoker { + + private static Log LOG = LogFactory.getLog(RegionServerCoprocessorRpcInvoker.class); + + protected final HConnection connection; + protected final TableName tableName; + protected RpcRetryingCallerFactory rpcFactory; + protected ServerName serverName; + protected Map regionRows; + protected List errors = new ArrayList(); + + public RegionServerCoprocessorRpcInvoker(HConnection conn, TableName tableName, + ServerName serverName, Map regionRows) { + this.connection = conn; + this.tableName = tableName; + this.serverName = serverName; + this.regionRows = regionRows; + this.rpcFactory = RpcRetryingCallerFactory.instantiate(conn.getConfiguration()); + } + + public ServerName getServerName() { + return this.serverName; + } + + public boolean hasErrors() { + return !errors.isEmpty(); + } + + /** + * Gets the errors + * + * @return + */ + public List getErrors() { + return this.errors; + } + + @SuppressWarnings("unchecked") + public void callExecMultiService(String service, String method, + Message request, Batch.Callback callback, R responsePrototype) throws IOException { + if (LOG.isTraceEnabled()) { + LOG.trace("Call: " + method + ", " + request.toString()); + } + + final List multiRequests = new ArrayList(); + for (Entry regionRow : regionRows.entrySet()) { + ClientProtos.CoprocessorServiceCall call = ClientProtos.CoprocessorServiceCall.newBuilder() + .setRow(ZeroCopyLiteralByteString.wrap(regionRow.getValue())).setServiceName(service) + .setMethodName(method).setRequest(request.toByteString()).build(); + ClientProtos.CoprocessorServiceRequest cr = ClientProtos.CoprocessorServiceRequest + .newBuilder().setCall(call) + .setRegion(RequestConverter.buildRegionSpecifier(REGION_NAME, regionRow.getKey())) + 
.build(); + multiRequests.add(cr); + } + + MultiRegionServerCallable callable = new MultiRegionServerCallable( + connection, tableName, serverName) { + public MultiCoprocessorServiceResponse call() throws Exception { + return ProtobufUtil.execMultiService(getStub(), multiRequests); + } + }; + try { + MultiCoprocessorServiceResponse result = rpcFactory + . newCaller().callWithRetries(callable); + List coprocessorResponseOrExceptions = result + .getResultOrExceptionList(); + + if (coprocessorResponseOrExceptions != null) { + for (CoprocessorServiceResponseOrException responseOrException : coprocessorResponseOrExceptions) { + byte[] region = responseOrException.getRegion().getValue().toByteArray(); + Throwable t = ProtobufUtil.toException(responseOrException.getException()); + if (t != null) { + // has error + errors.add(new BatchErrorsEntry(t, new RegionCoprocessServiceExec(regionRows + .get(region)), serverName.getHostAndPort())); + } else { // success + CoprocessorServiceResponse resp = responseOrException.getResponse(); + R r = null; + if (resp.getValue().hasValue()) { + r = (R) responsePrototype.newBuilderForType().mergeFrom(resp.getValue().getValue()) + .build(); + } else { + r = (R) responsePrototype.getDefaultInstanceForType(); + } + callback.update(resp.getRegion().getValue().toByteArray(), + regionRows.get(resp.getRegion().getValue().toByteArray()), r); + + } + } + } + + if (LOG.isTraceEnabled()) { + LOG.trace("Result is value=" + coprocessorResponseOrExceptions); + } + } catch (IOException e) { + // The service itself failed + LOG.warn("Fail to execute the coprocessor server in server " + serverName, e); + errors.add(new BatchErrorsEntry(e, + new RegionCoprocessServiceExec(HConstants.EMPTY_START_ROW), serverName.getHostAndPort())); + } catch (Throwable t) { + // This should not happen + LOG.warn("Fail to execute the coprocessor server in server " + serverName, t); + errors.add(new BatchErrorsEntry(t, + new RegionCoprocessServiceExec(HConstants.EMPTY_START_ROW), serverName.getHostAndPort())); + } + } + + /** + * An execution against a single region. + */ + public static class RegionCoprocessServiceExec implements Row { + + private byte[] startKey; + + public RegionCoprocessServiceExec(byte[] startKey) { + this.startKey = startKey; + } + + @Override + public int compareTo(Row o) { + return Bytes.compareTo(this.getRow(), o.getRow()); + } + + @Override + public byte[] getRow() { + return startKey; + } + + } + + /** + * The entry of the batched errors. + */ + public static class BatchErrorsEntry { + private Throwable t; + Row action; + String address; + + public BatchErrorsEntry(Throwable t, Row action, String address) { + this.t = t; + this.action = action; + this.address = address; + } + } + + /** + * Errors for each batchCoprocessorService. + */ + public static class BatchErrors { + private final List throwables = new ArrayList(); + private final List actions = new ArrayList(); + private final List addresses = new ArrayList(); + + public synchronized void add(BatchErrorsEntry entry) { + if (entry.action == null) { + throw new IllegalArgumentException("row cannot be null. address=" + entry.address); + } + + throwables.add(entry.t); + actions.add(entry.action); + addresses.add(entry.address != null ? 
entry.address : "null server"); + } + + public boolean hasError() { + return !throwables.isEmpty(); + } + + public synchronized RetriesExhaustedWithDetailsException makeException() { + return new RetriesExhaustedWithDetailsException(new ArrayList(throwables), + new ArrayList(actions), new ArrayList(addresses)); + } + + public synchronized void clear() { + throwables.clear(); + actions.clear(); + addresses.clear(); + } + } +} diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 2f38b3e..255b53d 100644 --- hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -95,6 +95,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServic import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue; @@ -2623,4 +2625,27 @@ public final class ProtobufUtil { } return result; } + + /** + * Executes a coprocessor service. + * + * @param client + * @param calls + * @param serverCallbackClassName + * @param initData + * @return + * @throws IOException + */ + public static MultiCoprocessorServiceResponse execMultiService( + final ClientService.BlockingInterface client, + final List requests) throws IOException { + MultiCoprocessorServiceRequest request = MultiCoprocessorServiceRequest.newBuilder() + .addAllRequest(requests).build(); + try { + MultiCoprocessorServiceResponse response = client.execMultiService(null, request); + return response; + } catch (ServiceException se) { + throw getRemoteException(se); + } + } } diff --git hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index dcdb8fd..f4c077a 100644 --- hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -57,6 +57,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServic import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; @@ -495,6 +497,12 @@ public class TestClientNoCluster extends Configured implements 
Tool { this.multiInvocationsCount.decrementAndGet(); } } + + @Override + public MultiCoprocessorServiceResponse execMultiService(RpcController controller, + MultiCoprocessorServiceRequest request) throws ServiceException { + throw new NotImplementedException(); + } } static ScanResponse doMetaScanResponse(final SortedMap> meta, diff --git hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java index 9e1952d..ddb9f33 100644 --- hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java +++ hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java @@ -28315,6 +28315,2495 @@ public final class ClientProtos { // @@protoc_insertion_point(class_scope:MultiResponse) } + public interface CoprocessorServiceResponseOrExceptionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + /** + * required .RegionSpecifier region = 1; + */ + boolean hasRegion(); + /** + * required .RegionSpecifier region = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + /** + * required .RegionSpecifier region = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional .CoprocessorServiceResponse response = 2; + /** + * optional .CoprocessorServiceResponse response = 2; + */ + boolean hasResponse(); + /** + * optional .CoprocessorServiceResponse response = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getResponse(); + /** + * optional .CoprocessorServiceResponse response = 2; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder getResponseOrBuilder(); + + // optional .NameBytesPair exception = 3; + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + boolean hasException(); + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException(); + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder(); + } + /** + * Protobuf type {@code CoprocessorServiceResponseOrException} + */ + public static final class CoprocessorServiceResponseOrException extends + com.google.protobuf.GeneratedMessage + implements CoprocessorServiceResponseOrExceptionOrBuilder { + // Use CoprocessorServiceResponseOrException.newBuilder() to construct. + private CoprocessorServiceResponseOrException(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CoprocessorServiceResponseOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CoprocessorServiceResponseOrException defaultInstance; + public static CoprocessorServiceResponseOrException getDefaultInstance() { + return defaultInstance; + } + + public CoprocessorServiceResponseOrException getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CoprocessorServiceResponseOrException( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = region_.toBuilder(); + } + region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(region_); + region_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder subBuilder = null; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + subBuilder = response_.toBuilder(); + } + response_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(response_); + response_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000002; + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + subBuilder = exception_.toBuilder(); + } + exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(exception_); + exception_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000004; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponseOrException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponseOrException_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public CoprocessorServiceResponseOrException parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CoprocessorServiceResponseOrException(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + /** + * required .RegionSpecifier region = 1; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional .CoprocessorServiceResponse response = 2; + public static final int RESPONSE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse response_; + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public boolean hasResponse() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getResponse() { + return response_; + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder getResponseOrBuilder() { + return response_; + } + + // optional .NameBytesPair exception = 3; + public static final int EXCEPTION_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_; + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + public boolean hasException() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + return exception_; + } + /** + * optional .NameBytesPair exception = 3; + * + *
+     * If the operation failed, this exception is set
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + return exception_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + response_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasResponse()) { + if (!getResponse().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, response_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, exception_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, response_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, exception_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasResponse() == other.hasResponse()); + if (hasResponse()) { + result = result && getResponse() + .equals(other.getResponse()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + 
@java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasResponse()) { + hash = (37 * hash) + RESPONSE_FIELD_NUMBER; + hash = (53 * hash) + getResponse().hashCode(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + 
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code CoprocessorServiceResponseOrException} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponseOrException_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponseOrException_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getResponseFieldBuilder(); + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (responseBuilder_ == null) { + response_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + } else { + responseBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponseOrException_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException build() { + 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (responseBuilder_ == null) { + result.response_ = response_; + } else { + result.response_ = responseBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasResponse()) { + mergeResponse(other.getResponse()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (hasResponse()) { + if (!getResponse().isInitialized()) { + + return false; + } + } + if (hasException()) { + if (!getException().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + 
private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + /** + * required .RegionSpecifier region = 1; + */ + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + /** + * required .RegionSpecifier region = 1; + */ + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .RegionSpecifier region = 1; + */ + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .RegionSpecifier region = 1; + */ + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .RegionSpecifier region = 1; + */ + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + /** + * required .RegionSpecifier region = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + /** + * required .RegionSpecifier region = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional .CoprocessorServiceResponse response = 2; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse response_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder> responseBuilder_; + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public boolean hasResponse() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getResponse() { + if (responseBuilder_ == null) { + return response_; + } else { + return responseBuilder_.getMessage(); + } + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public Builder setResponse(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse value) { + if (responseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + response_ = value; + onChanged(); + } else { + responseBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public Builder setResponse( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder builderForValue) { + if (responseBuilder_ == null) { + response_ = builderForValue.build(); + onChanged(); + } else { + responseBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public Builder mergeResponse(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse value) { + if (responseBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + response_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) { + response_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder(response_).mergeFrom(value).buildPartial(); + } else { + response_ = value; + } + onChanged(); + } else { + responseBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public Builder clearResponse() { + if (responseBuilder_ == null) { + response_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); + onChanged(); + } else { + responseBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder getResponseBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getResponseFieldBuilder().getBuilder(); + } + /** + * optional .CoprocessorServiceResponse 
response = 2; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder getResponseOrBuilder() { + if (responseBuilder_ != null) { + return responseBuilder_.getMessageOrBuilder(); + } else { + return response_; + } + } + /** + * optional .CoprocessorServiceResponse response = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder> + getResponseFieldBuilder() { + if (responseBuilder_ == null) { + responseBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder>( + response_, + getParentForChildren(), + isClean()); + response_ = null; + } + return responseBuilder_; + } + + // optional .NameBytesPair exception = 3; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_; + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public boolean hasException() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + /** + * optional .NameBytesPair exception = 3; + * + *
+       * If the operation failed, this exception is set
+       * 
+ */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponseOrException) + } + + static { + defaultInstance = new CoprocessorServiceResponseOrException(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CoprocessorServiceResponseOrException) + } + + public interface MultiCoprocessorServiceRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .CoprocessorServiceRequest request = 1; + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + java.util.List + getRequestList(); + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getRequest(int index); + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + int getRequestCount(); + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + java.util.List + getRequestOrBuilderList(); + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder getRequestOrBuilder( + int index); + } + /** + * Protobuf type {@code MultiCoprocessorServiceRequest} + */ + public static final class MultiCoprocessorServiceRequest extends + com.google.protobuf.GeneratedMessage + implements MultiCoprocessorServiceRequestOrBuilder { + // Use MultiCoprocessorServiceRequest.newBuilder() to construct. 
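// A minimal, illustrative sketch (not part of the generated file) of how a caller might
// unpack one CoprocessorServiceResponseOrException entry, using only the accessors generated
// above (hasException/getException, hasResponse/getResponse, getRegion); the class and method
// names below are hypothetical.
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException;

class ResponseOrExceptionExample {
  static void handle(CoprocessorServiceResponseOrException entry) {
    if (entry.hasException()) {
      // The call failed for this region: the exception class name and the serialized
      // exception travel as a NameBytesPair.
      throw new RuntimeException("Coprocessor call failed: " + entry.getException().getName());
    }
    if (entry.hasResponse()) {
      // Success: the per-region coprocessor result for the region identified by
      // entry.getRegion() is wrapped in a CoprocessorServiceResponse.
      CoprocessorServiceResponse response = entry.getResponse();
      // ... deserialize the wrapped value into the expected service response type ...
    }
  }
}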
+ private MultiCoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiCoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiCoprocessorServiceRequest defaultInstance; + public static MultiCoprocessorServiceRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiCoprocessorServiceRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiCoprocessorServiceRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + request_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + request_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + request_ = java.util.Collections.unmodifiableList(request_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiCoprocessorServiceRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiCoprocessorServiceRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .CoprocessorServiceRequest request = 1; + public static final int REQUEST_FIELD_NUMBER = 1; + private java.util.List request_; + /** + * 
repeated .CoprocessorServiceRequest request = 1; + */ + public java.util.List getRequestList() { + return request_; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public java.util.List + getRequestOrBuilderList() { + return request_; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public int getRequestCount() { + return request_.size(); + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getRequest(int index) { + return request_.get(index); + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder getRequestOrBuilder( + int index) { + return request_.get(index); + } + + private void initFields() { + request_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRequestCount(); i++) { + if (!getRequest(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < request_.size(); i++) { + output.writeMessage(1, request_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < request_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, request_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest) obj; + + boolean result = true; + result = result && getRequestList() + .equals(other.getRequestList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRequestCount() > 0) { + hash = (37 * hash) + REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getRequestList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MultiCoprocessorServiceRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRequestFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (requestBuilder_ == null) { + request_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + requestBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest(this); + int from_bitField0_ = bitField0_; + if (requestBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + request_ = java.util.Collections.unmodifiableList(request_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.request_ = request_; + } else { + result.request_ = requestBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.getDefaultInstance()) return this; + if (requestBuilder_ == null) { + if (!other.request_.isEmpty()) { + if (request_.isEmpty()) { + request_ = other.request_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRequestIsMutable(); + 
request_.addAll(other.request_); + } + onChanged(); + } + } else { + if (!other.request_.isEmpty()) { + if (requestBuilder_.isEmpty()) { + requestBuilder_.dispose(); + requestBuilder_ = null; + request_ = other.request_; + bitField0_ = (bitField0_ & ~0x00000001); + requestBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getRequestFieldBuilder() : null; + } else { + requestBuilder_.addAllMessages(other.request_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRequestCount(); i++) { + if (!getRequest(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .CoprocessorServiceRequest request = 1; + private java.util.List request_ = + java.util.Collections.emptyList(); + private void ensureRequestIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + request_ = new java.util.ArrayList(request_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder> requestBuilder_; + + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public java.util.List getRequestList() { + if (requestBuilder_ == null) { + return java.util.Collections.unmodifiableList(request_); + } else { + return requestBuilder_.getMessageList(); + } + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public int getRequestCount() { + if (requestBuilder_ == null) { + return request_.size(); + } else { + return requestBuilder_.getCount(); + } + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getRequest(int index) { + if (requestBuilder_ == null) { + return request_.get(index); + } else { + return requestBuilder_.getMessage(index); + } + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder setRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.set(index, value); + onChanged(); + } else { + requestBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder setRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + 
request_.set(index, builderForValue.build()); + onChanged(); + } else { + requestBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder addRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.add(value); + onChanged(); + } else { + requestBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder addRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.add(index, value); + onChanged(); + } else { + requestBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder addRequest( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.add(builderForValue.build()); + onChanged(); + } else { + requestBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder addRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.add(index, builderForValue.build()); + onChanged(); + } else { + requestBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder addAllRequest( + java.lang.Iterable values) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + super.addAll(values, request_); + onChanged(); + } else { + requestBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder clearRequest() { + if (requestBuilder_ == null) { + request_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + requestBuilder_.clear(); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public Builder removeRequest(int index) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.remove(index); + onChanged(); + } else { + requestBuilder_.remove(index); + } + return this; + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder getRequestBuilder( + int index) { + return getRequestFieldBuilder().getBuilder(index); + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder getRequestOrBuilder( + int index) { + if (requestBuilder_ == null) { + return request_.get(index); } else { + return requestBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public java.util.List + getRequestOrBuilderList() { + if (requestBuilder_ != null) { + return requestBuilder_.getMessageOrBuilderList(); + } else { + 
return java.util.Collections.unmodifiableList(request_); + } + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder addRequestBuilder() { + return getRequestFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()); + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder addRequestBuilder( + int index) { + return getRequestFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()); + } + /** + * repeated .CoprocessorServiceRequest request = 1; + */ + public java.util.List + getRequestBuilderList() { + return getRequestFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder> + getRequestFieldBuilder() { + if (requestBuilder_ == null) { + requestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder>( + request_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + request_ = null; + } + return requestBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiCoprocessorServiceRequest) + } + + static { + defaultInstance = new MultiCoprocessorServiceRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiCoprocessorServiceRequest) + } + + public interface MultiCoprocessorServiceResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .CoprocessorServiceResponseOrException resultOrException = 1; + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + java.util.List + getResultOrExceptionList(); + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException getResultOrException(int index); + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + int getResultOrExceptionCount(); + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + java.util.List + getResultOrExceptionOrBuilderList(); + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index); + } + /** + * Protobuf type {@code MultiCoprocessorServiceResponse} + */ + public static final class MultiCoprocessorServiceResponse extends + com.google.protobuf.GeneratedMessage + implements MultiCoprocessorServiceResponseOrBuilder { + // Use MultiCoprocessorServiceResponse.newBuilder() to construct. 
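// A minimal sketch of how the new MultiCoprocessorServiceRequest might be assembled on the
// client side: one CoprocessorServiceRequest per target region, batched into a single message
// for the region server. The perRegionCalls parameter is hypothetical; only the generated
// newBuilder()/addRequest()/build() methods shown above are assumed.
import java.util.List;

import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest;

class MultiRequestExample {
  static MultiCoprocessorServiceRequest batch(List<CoprocessorServiceRequest> perRegionCalls) {
    MultiCoprocessorServiceRequest.Builder builder = MultiCoprocessorServiceRequest.newBuilder();
    for (CoprocessorServiceRequest call : perRegionCalls) {
      builder.addRequest(call);  // repeated .CoprocessorServiceRequest request = 1
    }
    return builder.build();
  }
}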
+ private MultiCoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private MultiCoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final MultiCoprocessorServiceResponse defaultInstance; + public static MultiCoprocessorServiceResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiCoprocessorServiceResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private MultiCoprocessorServiceResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public MultiCoprocessorServiceResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new MultiCoprocessorServiceResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .CoprocessorServiceResponseOrException resultOrException = 1; + public 
static final int RESULTOREXCEPTION_FIELD_NUMBER = 1; + private java.util.List resultOrException_; + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public java.util.List getResultOrExceptionList() { + return resultOrException_; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionOrBuilderList() { + return resultOrException_; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public int getResultOrExceptionCount() { + return resultOrException_.size(); + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException getResultOrException(int index) { + return resultOrException_.get(index); + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index) { + return resultOrException_.get(index); + } + + private void initFields() { + resultOrException_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultOrExceptionCount(); i++) { + if (!getResultOrException(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < resultOrException_.size(); i++) { + output.writeMessage(1, resultOrException_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < resultOrException_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, resultOrException_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse) obj; + + boolean result = true; + result = result && getResultOrExceptionList() + .equals(other.getResultOrExceptionList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultOrExceptionCount() > 0) { + hash = (37 * hash) + 
RESULTOREXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getResultOrExceptionList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code MultiCoprocessorServiceResponse} + */ + public 
static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultOrExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultOrExceptionBuilder_ == null) { + resultOrException_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultOrExceptionBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiCoprocessorServiceResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse(this); + int from_bitField0_ = bitField0_; + if (resultOrExceptionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.resultOrException_ = resultOrException_; + } else { + result.resultOrException_ = resultOrExceptionBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance()) return this; + if (resultOrExceptionBuilder_ == null) { + if (!other.resultOrException_.isEmpty()) { + if (resultOrException_.isEmpty()) { + resultOrException_ = other.resultOrException_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultOrExceptionIsMutable(); + resultOrException_.addAll(other.resultOrException_); + } + onChanged(); + } + } else { + if (!other.resultOrException_.isEmpty()) { + if (resultOrExceptionBuilder_.isEmpty()) { + resultOrExceptionBuilder_.dispose(); + resultOrExceptionBuilder_ = null; + resultOrException_ = other.resultOrException_; + bitField0_ = (bitField0_ & ~0x00000001); + resultOrExceptionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultOrExceptionFieldBuilder() : null; + } else { + resultOrExceptionBuilder_.addAllMessages(other.resultOrException_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultOrExceptionCount(); i++) { + if (!getResultOrException(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .CoprocessorServiceResponseOrException resultOrException = 1; + private java.util.List resultOrException_ = + java.util.Collections.emptyList(); + private void ensureResultOrExceptionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + resultOrException_ = new java.util.ArrayList(resultOrException_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder> resultOrExceptionBuilder_; + + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public java.util.List getResultOrExceptionList() { + if (resultOrExceptionBuilder_ == null) { + return java.util.Collections.unmodifiableList(resultOrException_); + } else { + return resultOrExceptionBuilder_.getMessageList(); + } + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public int getResultOrExceptionCount() { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.size(); + } else { + return 
resultOrExceptionBuilder_.getCount(); + } + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException getResultOrException(int index) { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.get(index); + } else { + return resultOrExceptionBuilder_.getMessage(index); + } + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder setResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.set(index, value); + onChanged(); + } else { + resultOrExceptionBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder setResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.set(index, builderForValue.build()); + onChanged(); + } else { + resultOrExceptionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.add(value); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder addResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException value) { + if (resultOrExceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultOrExceptionIsMutable(); + resultOrException_.add(index, value); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder addResultOrException( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.add(builderForValue.build()); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder addResultOrException( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder builderForValue) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.add(index, builderForValue.build()); + onChanged(); + } else { + resultOrExceptionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated 
.CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder addAllResultOrException( + java.lang.Iterable values) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + super.addAll(values, resultOrException_); + onChanged(); + } else { + resultOrExceptionBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder clearResultOrException() { + if (resultOrExceptionBuilder_ == null) { + resultOrException_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultOrExceptionBuilder_.clear(); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public Builder removeResultOrException(int index) { + if (resultOrExceptionBuilder_ == null) { + ensureResultOrExceptionIsMutable(); + resultOrException_.remove(index); + onChanged(); + } else { + resultOrExceptionBuilder_.remove(index); + } + return this; + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder getResultOrExceptionBuilder( + int index) { + return getResultOrExceptionFieldBuilder().getBuilder(index); + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder getResultOrExceptionOrBuilder( + int index) { + if (resultOrExceptionBuilder_ == null) { + return resultOrException_.get(index); } else { + return resultOrExceptionBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionOrBuilderList() { + if (resultOrExceptionBuilder_ != null) { + return resultOrExceptionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(resultOrException_); + } + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder addResultOrExceptionBuilder() { + return getResultOrExceptionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.getDefaultInstance()); + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder addResultOrExceptionBuilder( + int index) { + return getResultOrExceptionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.getDefaultInstance()); + } + /** + * repeated .CoprocessorServiceResponseOrException resultOrException = 1; + */ + public java.util.List + getResultOrExceptionBuilderList() { + return getResultOrExceptionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder> + 
getResultOrExceptionFieldBuilder() { + if (resultOrExceptionBuilder_ == null) { + resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrExceptionOrBuilder>( + resultOrException_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + resultOrException_ = null; + } + return resultOrExceptionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiCoprocessorServiceResponse) + } + + static { + defaultInstance = new MultiCoprocessorServiceResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiCoprocessorServiceResponse) + } + /** * Protobuf service {@code ClientService} */ @@ -28371,6 +30860,14 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, com.google.protobuf.RpcCallback done); + /** + * rpc ExecMultiService(.MultiCoprocessorServiceRequest) returns (.MultiCoprocessorServiceResponse); + */ + public abstract void execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + } public static com.google.protobuf.Service newReflectiveService( @@ -28424,6 +30921,14 @@ public final class ClientProtos { impl.multi(controller, request, done); } + @java.lang.Override + public void execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + impl.execMultiService(controller, request, done); + } + }; } @@ -28458,6 +30963,8 @@ public final class ClientProtos { return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request); case 5: return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request); + case 6: + return impl.execMultiService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest)request); default: throw new java.lang.AssertionError("Can't get here."); } @@ -28484,6 +30991,8 @@ public final class ClientProtos { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -28510,6 +31019,8 @@ public final class ClientProtos { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -28566,6 +31077,14 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest 
request, com.google.protobuf.RpcCallback done); + /** + * rpc ExecMultiService(.MultiCoprocessorServiceRequest) returns (.MultiCoprocessorServiceResponse); + */ + public abstract void execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done); + public static final com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { @@ -28618,6 +31137,11 @@ public final class ClientProtos { com.google.protobuf.RpcUtil.specializeCallback( done)); return; + case 6: + this.execMultiService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; default: throw new java.lang.AssertionError("Can't get here."); } @@ -28644,6 +31168,8 @@ public final class ClientProtos { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -28670,6 +31196,8 @@ public final class ClientProtos { return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(); case 5: return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance(); default: throw new java.lang.AssertionError("Can't get here."); } @@ -28780,6 +31308,21 @@ public final class ClientProtos { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance())); } + + public void execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.class, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance())); + } } public static BlockingInterface newBlockingStub( @@ -28817,6 +31360,11 @@ public final class ClientProtos { com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request) throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { @@ -28897,6 +31445,18 @@ public final class ClientProtos { 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()); } + + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse execMultiService( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse.getDefaultInstance()); + } + } // @@protoc_insertion_point(class_scope:ClientService) @@ -29042,6 +31602,21 @@ public final class ClientProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MultiResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CoprocessorServiceResponseOrException_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CoprocessorServiceResponseOrException_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiCoprocessorServiceRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiCoprocessorServiceRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiCoprocessorServiceResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiCoprocessorServiceResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -29139,16 +31714,27 @@ public final class ClientProtos { "#\n\014regionAction\030\001 \003(\0132\r.RegionAction\022\022\n\n" + "nonceGroup\030\002 \001(\004\"@\n\rMultiResponse\022/\n\022reg" + "ionActionResult\030\001 \003(\0132\023.RegionActionResu" + - "lt2\261\002\n\rClientService\022 \n\003Get\022\013.GetRequest" + - "\032\014.GetResponse\022)\n\006Mutate\022\016.MutateRequest", - "\032\017.MutateResponse\022#\n\004Scan\022\014.ScanRequest\032" + - "\r.ScanResponse\022>\n\rBulkLoadHFile\022\025.BulkLo" + - "adHFileRequest\032\026.BulkLoadHFileResponse\022F" + - "\n\013ExecService\022\032.CoprocessorServiceReques" + - "t\032\033.CoprocessorServiceResponse\022&\n\005Multi\022" + - "\r.MultiRequest\032\016.MultiResponseBB\n*org.ap" + - "ache.hadoop.hbase.protobuf.generatedB\014Cl" + - "ientProtosH\001\210\001\001\240\001\001" + "lt\"\233\001\n%CoprocessorServiceResponseOrExcep" + + "tion\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022-", + "\n\010response\030\002 \001(\0132\033.CoprocessorServiceRes" + + "ponse\022!\n\texception\030\003 \001(\0132\016.NameBytesPair" + + "\"M\n\036MultiCoprocessorServiceRequest\022+\n\007re" + + "quest\030\001 \003(\0132\032.CoprocessorServiceRequest\"" + + "d\n\037MultiCoprocessorServiceResponse\022A\n\021re" + + "sultOrException\030\001 \003(\0132&.CoprocessorServi" + + "ceResponseOrException2\210\003\n\rClientService\022" + + " \n\003Get\022\013.GetRequest\032\014.GetResponse\022)\n\006Mut" + + "ate\022\016.MutateRequest\032\017.MutateResponse\022#\n\004" + + "Scan\022\014.ScanRequest\032\r.ScanResponse\022>\n\rBul", + "kLoadHFile\022\025.BulkLoadHFileRequest\032\026.Bulk" + + 
"LoadHFileResponse\022F\n\013ExecService\022\032.Copro" + + "cessorServiceRequest\032\033.CoprocessorServic" + + "eResponse\022&\n\005Multi\022\r.MultiRequest\032\016.Mult" + + "iResponse\022U\n\020ExecMultiService\022\037.MultiCop" + + "rocessorServiceRequest\032 .MultiCoprocesso" + + "rServiceResponseBB\n*org.apache.hadoop.hb" + + "ase.protobuf.generatedB\014ClientProtosH\001\210\001" + + "\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -29323,6 +31909,24 @@ public final class ClientProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_MultiResponse_descriptor, new java.lang.String[] { "RegionActionResult", }); + internal_static_CoprocessorServiceResponseOrException_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_CoprocessorServiceResponseOrException_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CoprocessorServiceResponseOrException_descriptor, + new java.lang.String[] { "Region", "Response", "Exception", }); + internal_static_MultiCoprocessorServiceRequest_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_MultiCoprocessorServiceRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiCoprocessorServiceRequest_descriptor, + new java.lang.String[] { "Request", }); + internal_static_MultiCoprocessorServiceResponse_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_MultiCoprocessorServiceResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiCoprocessorServiceResponse_descriptor, + new java.lang.String[] { "ResultOrException", }); return null; } }; diff --git hbase-protocol/src/main/protobuf/Client.proto hbase-protocol/src/main/protobuf/Client.proto index 180b711..63222e3 100644 --- hbase-protocol/src/main/protobuf/Client.proto +++ hbase-protocol/src/main/protobuf/Client.proto @@ -389,4 +389,22 @@ service ClientService { rpc Multi(MultiRequest) returns(MultiResponse); + + rpc ExecMultiService(MultiCoprocessorServiceRequest) + returns(MultiCoprocessorServiceResponse); +} + +message CoprocessorServiceResponseOrException { + required RegionSpecifier region = 1; + optional CoprocessorServiceResponse response = 2; + // If the operation failed, this exception is set + optional NameBytesPair exception = 3; +} + +message MultiCoprocessorServiceRequest { + repeated CoprocessorServiceRequest request = 1; +} + +message MultiCoprocessorServiceResponse { + repeated CoprocessorServiceResponseOrException resultOrException = 1; } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java index cb6796d..1e01f2d 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java @@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.util.Bytes; import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader; @@ -69,6 +70,8 @@ import org.apache.hadoop.hbase.util.SortedCopyOnWriteSet; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.io.MultipleIOException; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -612,6 +615,23 @@ public abstract class CoprocessorHost { byte[] qualifier, long amount, boolean writeToWAL) throws IOException { return table.incrementColumnValue(row, family, qualifier, amount, writeToWAL); } + + @Override + public Map batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable { + return table.batchCoprocessorService(serviceDescriptor, method, message, startKey, endKey, + responsePrototype); + } + + @Override + public void batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, Callback callback, R responsePrototype) + throws ServiceException, Throwable { + table.batchCoprocessorService(serviceDescriptor, method, message, startKey, endKey, + callback, responsePrototype); + } } /** The coprocessor */ diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 59005b3..85fc840 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -43,9 +43,17 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.TreeSet; +import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.management.ObjectName; @@ -159,8 +167,11 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResp import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrException; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; @@ -520,6 +531,9 @@ public class HRegionServer implements 
ClientProtos.ClientService.BlockingInterfa private UserProvider userProvider; + //thread pool for multi coprocessor execution + protected java.util.concurrent.ExecutorService pool; + /** * Starts a HRegionServer at the default location * @@ -567,6 +581,27 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, HConstants.DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD); + int poolSize = conf.getInt("hbase.regionserver.multi.coprocessor.pool.size", Integer.MAX_VALUE); + if (poolSize <= 0) { + poolSize = Integer.MAX_VALUE; + } + final SynchronousQueue blockingQueue = new SynchronousQueue(); + RejectedExecutionHandler rejectHandler = new RejectedExecutionHandler() { + + @Override + public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { + try { + blockingQueue.put(r); + } catch (InterruptedException e) { + throw new RejectedExecutionException(e); + } + } + }; + long keepAliveTime = conf.getLong("hbase.regionserver.threads.keepalivetime", 60); + pool = new ThreadPoolExecutor(1, poolSize, keepAliveTime, TimeUnit.SECONDS, + blockingQueue, Threads.newDaemonThreadFactory("multi-coprocessor-"), rejectHandler); + ((ThreadPoolExecutor) pool).allowCoreThreadTimeOut(true); + // Server to handle client requests. String hostname = conf.get("hbase.regionserver.ipc.address", Strings.domainNamePointerToHostName(DNS.getDefaultHost( @@ -4568,4 +4603,81 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa respBuilder.setResponse(openInfoList.size()); return respBuilder.build(); } + + /** + * Executes the coprocessor services against regions in a batch. + * + * @param controller + * @param request + * @throws ServiceException + */ + @Override + public MultiCoprocessorServiceResponse execMultiService(RpcController controller, + MultiCoprocessorServiceRequest request) throws ServiceException { + try { + checkOpen(); + } catch (IOException e) { + throw new ServiceException(e); + } + + requestCount.increment(); + List coprocessorRequests = request.getRequestList(); + Map> futures = new TreeMap>( + Bytes.BYTES_COMPARATOR); + for (final CoprocessorServiceRequest coprocessorRequest : coprocessorRequests) { + Future future = pool.submit(new Callable() { + + @Override + public Message call() throws Exception { + try { + HRegion region = getRegion(coprocessorRequest.getRegion()); + // ignore the passed in controller (from the serialized call) + ServerRpcController execController = new ServerRpcController(); + Message result = region.execService(execController, coprocessorRequest.getCall()); + if (execController.getFailedOn() != null) { + throw execController.getFailedOn(); + } + return result; + } catch (IOException ie) { + throw new ServiceException(ie); + } + } + + }); + futures.put(coprocessorRequest.getRegion().toByteArray(), future); + } + + MultiCoprocessorServiceResponse.Builder builder = MultiCoprocessorServiceResponse + .newBuilder(); + for (Map.Entry> e : futures.entrySet()) { + CoprocessorServiceResponseOrException.Builder coprocessorServiceResponseOrExceptionBuilder = + CoprocessorServiceResponseOrException.newBuilder(); + RegionSpecifier regionSpecifier = RequestConverter.buildRegionSpecifier( + RegionSpecifierType.REGION_NAME, e.getKey()); + coprocessorServiceResponseOrExceptionBuilder.setRegion(regionSpecifier); + try { + Message result = e.getValue().get(); + CoprocessorServiceResponse.Builder crBuilder = CoprocessorServiceResponse.newBuilder(); + crBuilder.setRegion(regionSpecifier); + 
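// Wrap the successful result as a NameBytesPair: the value carries the response class
+          // name plus the serialized message bytes, so the client side can rebuild the message
+          // from its response prototype.
+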
crBuilder.setValue(crBuilder.getValueBuilder().setName(result.getClass().getName()) + .setValue(result.toByteString())); + coprocessorServiceResponseOrExceptionBuilder.setResponse( + crBuilder.build()); + } catch (ExecutionException ee) { + LOG.warn("Error calling coprocessor service for row " + Bytes.toStringBinary(e.getKey()), + ee); + coprocessorServiceResponseOrExceptionBuilder.setException(ResponseConverter + .buildException(new ServiceException(ee))); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + LOG.warn( + "Interrupted calling coprocessor service for row " + Bytes.toStringBinary(e.getKey()), + ie); + coprocessorServiceResponseOrExceptionBuilder.setException(ResponseConverter + .buildException(new ServiceException(ie))); + } + builder.addResultOrException(coprocessorServiceResponseOrExceptionBuilder.build()); + } + return builder.build(); + } } diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index 6352c4a..9317e76 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.rest.Constants; @@ -64,6 +65,8 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.StringUtils; +import com.google.protobuf.Descriptors; +import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; @@ -797,4 +800,19 @@ public class RemoteHTable implements HTableInterface { long amount, boolean writeToWAL) throws IOException { throw new IOException("incrementColumnValue not supported"); } + + @Override + public Map batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable { + throw new UnsupportedOperationException("batchCoprocessorService not implemented"); + } + + @Override + public void batchCoprocessorService( + Descriptors.ServiceDescriptor serviceDescriptor, String method, Message message, + byte[] startKey, byte[] endKey, Callback callback, R responsePrototype) + throws ServiceException, Throwable { + throw new UnsupportedOperationException("batchCoprocessorService not implemented"); + } } diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java new file mode 100644 index 0000000..17c6d33 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java @@ -0,0 +1,102 @@ +package org.apache.hadoop.hbase.coprocessor; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.Cell; +import 
org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.InternalScanner; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +public class ColumnAggregationEndpointNullResponse + extends + ColumnAggregationServiceNullResponse +implements Coprocessor, CoprocessorService { + static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointNullResponse.class); + private RegionCoprocessorEnvironment env = null; + @Override + public Service getService() { + return this; + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + if (env instanceof RegionCoprocessorEnvironment) { + this.env = (RegionCoprocessorEnvironment)env; + return; + } + throw new CoprocessorException("Must be loaded on a table region!"); + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + // Nothing to do. + } + + @Override + public void sum(RpcController controller, SumRequest request, RpcCallback done) { + // aggregate at each region + Scan scan = new Scan(); + // Family is required in pb. Qualifier is not. + byte[] family = request.getFamily().toByteArray(); + byte[] qualifier = request.hasQualifier() ? request.getQualifier().toByteArray() : null; + if (request.hasQualifier()) { + scan.addColumn(family, qualifier); + } else { + scan.addFamily(family); + } + int sumResult = 0; + InternalScanner scanner = null; + try { + HRegion region = this.env.getRegion(); + if (Bytes.equals(region.getEndKey(), HConstants.EMPTY_END_ROW)) { + done.run(null); + return; + } + scanner = region.getScanner(scan); + List curVals = new ArrayList(); + boolean hasMore = false; + do { + curVals.clear(); + hasMore = scanner.next(curVals); + for (Cell kv : curVals) { + if (CellUtil.matchingQualifier(kv, qualifier)) { + sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset()); + } + } + } while (hasMore); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + // Set result to -1 to indicate error. 
+ sumResult = -1; + LOG.info("Setting sum result to -1 to indicate error", e); + } finally { + if (scanner != null) { + try { + scanner.close(); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + sumResult = -1; + LOG.info("Setting sum result to -1 to indicate error", e); + } + } + } + done.run(SumResponse.newBuilder().setSum(sumResult).build()); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java new file mode 100644 index 0000000..bbc4544 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java @@ -0,0 +1,101 @@ +package org.apache.hadoop.hbase.coprocessor; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.regionserver.InternalScanner; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; + +public class ColumnAggregationEndpointWithErrors + extends + ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors +implements Coprocessor, CoprocessorService { + static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointWithErrors.class); + private RegionCoprocessorEnvironment env = null; + @Override + public Service getService() { + return this; + } + + @Override + public void start(CoprocessorEnvironment env) throws IOException { + if (env instanceof RegionCoprocessorEnvironment) { + this.env = (RegionCoprocessorEnvironment)env; + return; + } + throw new CoprocessorException("Must be loaded on a table region!"); + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + // Nothing to do. + } + + @Override + public void sum(RpcController controller, SumRequest request, RpcCallback done) { + // aggregate at each region + Scan scan = new Scan(); + // Family is required in pb. Qualifier is not. + byte[] family = request.getFamily().toByteArray(); + byte[] qualifier = request.hasQualifier() ? 
request.getQualifier().toByteArray() : null; + if (request.hasQualifier()) { + scan.addColumn(family, qualifier); + } else { + scan.addFamily(family); + } + int sumResult = 0; + InternalScanner scanner = null; + try { + HRegion region = this.env.getRegion(); + if (Bytes.equals(region.getEndKey(), HConstants.EMPTY_END_ROW)) { + throw new IOException("An expected exception"); + } + scanner = region.getScanner(scan); + List curVals = new ArrayList(); + boolean hasMore = false; + do { + curVals.clear(); + hasMore = scanner.next(curVals); + for (Cell kv : curVals) { + if (CellUtil.matchingQualifier(kv, qualifier)) { + sumResult += Bytes.toInt(kv.getValueArray(), kv.getValueOffset()); + } + } + } while (hasMore); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + // Set result to -1 to indicate error. + sumResult = -1; + LOG.info("Setting sum result to -1 to indicate error", e); + } finally { + if (scanner != null) { + try { + scanner.close(); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + sumResult = -1; + LOG.info("Setting sum result to -1 to indicate error", e); + } + } + } + done.run(SumResponse.newBuilder().setSum(sumResult).build()); + } +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java new file mode 100644 index 0000000..fff804c --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java @@ -0,0 +1,306 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.coprocessor; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import com.google.protobuf.ServiceException; +import com.google.protobuf.ZeroCopyLiteralByteString; + +/** + * TestEndpoint: test cases to verify the batch execution of coprocessor Endpoint + */ +@Category(MediumTests.class) +public class TestBatchCoprocessorEndpoint { + private static final Log LOG = LogFactory.getLog(TestBatchCoprocessorEndpoint.class); + + private static final TableName TEST_TABLE = + TableName.valueOf("TestTable"); + private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily"); + private static final byte[] TEST_QUALIFIER = Bytes.toBytes("TestQualifier"); + private static byte[] ROW = Bytes.toBytes("testRow"); + + private static final int ROWSIZE = 20; + private static final int rowSeperator1 = 5; + private static final int rowSeperator2 = 12; + private static byte[][] ROWS = makeN(ROW, ROWSIZE); + + private static HBaseTestingUtility util = new HBaseTestingUtility(); + + @BeforeClass + public static void setupBeforeClass() throws Exception { + // set configure to indicate which cp should be loaded + Configuration conf = util.getConfiguration(); + conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, + org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(), + ProtobufCoprocessorService.class.getName(), + ColumnAggregationEndpointWithErrors.class.getName(), + ColumnAggregationEndpointNullResponse.class.getName()); + conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, + ProtobufCoprocessorService.class.getName()); + util.startMiniCluster(2); + HBaseAdmin admin = new HBaseAdmin(conf); + HTableDescriptor desc = new HTableDescriptor(TEST_TABLE); + desc.addFamily(new HColumnDescriptor(TEST_FAMILY)); + admin.createTable(desc, new byte[][]{ROWS[rowSeperator1], ROWS[rowSeperator2]}); + util.waitUntilAllRegionsAssigned(TEST_TABLE); + admin.close(); + + HTable table = new HTable(conf, TEST_TABLE); + for (int i = 0; i < ROWSIZE; i++) { + Put put = new Put(ROWS[i]); + put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i)); + table.put(put); + } + table.close(); + } + + 
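// A minimal sketch of the client-side pattern the tests below exercise, using only names
+  // defined in this test class ("callback" stands in for a Batch.Callback like the ones in
+  // testAggregation): build the request once, then fan it out over the whole key range in a
+  // single batchCoprocessorService call; the callback fires once per region, keyed by the
+  // region name.
+  //
+  //   ColumnAggregationProtos.SumRequest request = ColumnAggregationProtos.SumRequest
+  //       .newBuilder().setFamily(ZeroCopyLiteralByteString.wrap(TEST_FAMILY)).build();
+  //   table.batchCoprocessorService(
+  //       ColumnAggregationProtos.ColumnAggregationService.getDescriptor(), "sum", request,
+  //       ROWS[0], ROWS[ROWS.length - 1], callback,
+  //       ColumnAggregationProtos.SumResponse.getDefaultInstance());
+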
@AfterClass + public static void tearDownAfterClass() throws Exception { + util.shutdownMiniCluster(); + } + + @Test + public void testAggregationNullResponse() throws Throwable { + HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + final Map results = + new HashMap(); + ColumnAggregationWithNullResponseProtos.SumRequest.Builder builder = + ColumnAggregationWithNullResponseProtos.SumRequest + .newBuilder(); + builder.setFamily(ZeroCopyLiteralByteString.wrap(TEST_FAMILY)); + if (TEST_QUALIFIER != null && TEST_QUALIFIER.length > 0) { + builder.setQualifier(ZeroCopyLiteralByteString.wrap(TEST_QUALIFIER)); + } + table.batchCoprocessorService( + ColumnAggregationServiceNullResponse.getDescriptor(), "sum", + builder.build(), ROWS[0], ROWS[ROWS.length - 1], + new Batch.Callback() { + + @Override + public void update(byte[] region, byte[] row, + ColumnAggregationWithNullResponseProtos.SumResponse result) { + results.put(region, result); + } + + }, ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance()); + + int sumResult = 0; + int expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = 0; i < rowSeperator2; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + table.close(); + } + + private static byte[][] makeN(byte[] base, int n) { + byte[][] ret = new byte[n][]; + for (int i = 0; i < n; i++) { + ret[i] = Bytes.add(base, Bytes.toBytes(String.format("%02d", i))); + } + return ret; + } + + private Map sum(final HTable table, final byte[] family, + final byte[] qualifier, final byte[] start, final byte[] end) throws ServiceException, + Throwable { + ColumnAggregationProtos.SumRequest.Builder builder = ColumnAggregationProtos.SumRequest + .newBuilder(); + builder.setFamily(ZeroCopyLiteralByteString.wrap(family)); + if (qualifier != null && qualifier.length > 0) { + builder.setQualifier(ZeroCopyLiteralByteString.wrap(qualifier)); + } + return table.batchCoprocessorService( + ColumnAggregationProtos.ColumnAggregationService.getDescriptor(), "sum", builder.build(), + start, end, ColumnAggregationProtos.SumResponse.getDefaultInstance()); + } + + @Test + public void testAggregationWithReturnValue() throws Throwable { + HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + Map results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[0], + ROWS[ROWS.length - 1]); + int sumResult = 0; + int expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = 0; i < ROWSIZE; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + + results.clear(); + + // scan: for region 2 and region 3 + results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[rowSeperator1], + ROWS[ROWS.length - 1]); + sumResult = 0; + expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = rowSeperator1; i < ROWSIZE; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + table.close(); + } + + @Test + public void testAggregation() throws Throwable { + HTable table = new 
HTable(util.getConfiguration(), TEST_TABLE); + final Map results = new HashMap(); + ColumnAggregationProtos.SumRequest.Builder builder = ColumnAggregationProtos.SumRequest + .newBuilder(); + builder.setFamily(ZeroCopyLiteralByteString.wrap(TEST_FAMILY)); + if (TEST_QUALIFIER != null && TEST_QUALIFIER.length > 0) { + builder.setQualifier(ZeroCopyLiteralByteString.wrap(TEST_QUALIFIER)); + } + table.batchCoprocessorService(ColumnAggregationProtos.ColumnAggregationService.getDescriptor(), + "sum", builder.build(), ROWS[0], ROWS[ROWS.length - 1], new Batch.Callback() { + + @Override + public void update(byte[] region, byte[] row, SumResponse result) { + results.put(region, result); + } + + }, ColumnAggregationProtos.SumResponse.getDefaultInstance()); + int sumResult = 0; + int expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = 0; i < ROWSIZE; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + + results.clear(); + + // scan: for region 2 and region 3 + table.batchCoprocessorService(ColumnAggregationProtos.ColumnAggregationService.getDescriptor(), + "sum", builder.build(), ROWS[rowSeperator1], ROWS[ROWS.length - 1], + new Batch.Callback() { + + @Override + public void update(byte[] region, byte[] row, SumResponse result) { + results.put(region, result); + } + + }, ColumnAggregationProtos.SumResponse.getDefaultInstance()); + sumResult = 0; + expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = rowSeperator1; i < ROWSIZE; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + table.close(); + } + + @Test + public void testAggregationWithErrors() throws Throwable { + HTable table = new HTable(util.getConfiguration(), TEST_TABLE); + final Map results = + new HashMap(); + ColumnAggregationWithErrorsProtos.SumRequest.Builder builder = + ColumnAggregationWithErrorsProtos.SumRequest + .newBuilder(); + builder.setFamily(ZeroCopyLiteralByteString.wrap(TEST_FAMILY)); + if (TEST_QUALIFIER != null && TEST_QUALIFIER.length > 0) { + builder.setQualifier(ZeroCopyLiteralByteString.wrap(TEST_QUALIFIER)); + } + boolean hasError = false; + try { + table.batchCoprocessorService( + ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors.getDescriptor(), "sum", + builder.build(), ROWS[0], ROWS[ROWS.length - 1], + new Batch.Callback() { + + @Override + public void update(byte[] region, byte[] row, + ColumnAggregationWithErrorsProtos.SumResponse result) { + results.put(region, result); + } + + }, ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance()); + } catch (Throwable t) { + LOG.info("Exeptions in coprocessor service", t); + hasError = true; + } + + int sumResult = 0; + int expectedResult = 0; + for (Map.Entry e : results.entrySet()) { + LOG.info("Got value " + e.getValue().getSum() + " for region " + + Bytes.toStringBinary(e.getKey())); + sumResult += e.getValue().getSum(); + } + for (int i = 0; i < rowSeperator2; i++) { + expectedResult += i; + } + assertEquals("Invalid result", expectedResult, sumResult); + assertTrue(hasError); + table.close(); + } +} \ No newline at end of file diff --git 
hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java new file mode 100644 index 0000000..6768aa2 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java @@ -0,0 +1,1278 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ColumnAggregationWithErrorsProtocol.proto + +package org.apache.hadoop.hbase.coprocessor.protobuf.generated; + +public final class ColumnAggregationWithErrorsProtos { + private ColumnAggregationWithErrorsProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface SumRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + /** + * required bytes family = 1; + */ + boolean hasFamily(); + /** + * required bytes family = 1; + */ + com.google.protobuf.ByteString getFamily(); + + // optional bytes qualifier = 2; + /** + * optional bytes qualifier = 2; + */ + boolean hasQualifier(); + /** + * optional bytes qualifier = 2; + */ + com.google.protobuf.ByteString getQualifier(); + } + /** + * Protobuf type {@code SumRequest} + */ + public static final class SumRequest extends + com.google.protobuf.GeneratedMessage + implements SumRequestOrBuilder { + // Use SumRequest.newBuilder() to construct. + private SumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SumRequest defaultInstance; + public static SumRequest getDefaultInstance() { + return defaultInstance; + } + + public SumRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + qualifier_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // optional bytes qualifier = 2; + public static final int QUALIFIER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, qualifier_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, qualifier_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + 
return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SumRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumRequest_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + /** + * required bytes family = 1; + */ + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + /** + * required 
bytes family = 1; + */ + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // optional bytes qualifier = 2; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + /** + * optional bytes qualifier = 2; + */ + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + qualifier_ = value; + onChanged(); + return this; + } + /** + * optional bytes qualifier = 2; + */ + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SumRequest) + } + + static { + defaultInstance = new SumRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SumRequest) + } + + public interface SumResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int64 sum = 1; + /** + * required int64 sum = 1; + */ + boolean hasSum(); + /** + * required int64 sum = 1; + */ + long getSum(); + } + /** + * Protobuf type {@code SumResponse} + */ + public static final class SumResponse extends + com.google.protobuf.GeneratedMessage + implements SumResponseOrBuilder { + // Use SumResponse.newBuilder() to construct. + private SumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SumResponse defaultInstance; + public static SumResponse getDefaultInstance() { + return defaultInstance; + } + + public SumResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + sum_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required int64 sum = 1; + public static final int SUM_FIELD_NUMBER = 1; + private long sum_; + /** + * required int64 sum = 1; + */ + public boolean hasSum() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 sum = 1; + */ + public long getSum() { + return sum_; + } + + private void initFields() { + sum_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasSum()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, sum_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, sum_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse) obj; + + boolean result = true; + result = result && (hasSum() == other.hasSum()); + if (hasSum()) { + result = result && (getSum() + == other.getSum()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash 
= 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSum()) { + hash = (37 * hash) + SUM_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getSum()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder 
newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SumResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + sum_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_SumResponse_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.sum_ = sum_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse) { + return 
mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance()) return this; + if (other.hasSum()) { + setSum(other.getSum()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasSum()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required int64 sum = 1; + private long sum_ ; + /** + * required int64 sum = 1; + */ + public boolean hasSum() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 sum = 1; + */ + public long getSum() { + return sum_; + } + /** + * required int64 sum = 1; + */ + public Builder setSum(long value) { + bitField0_ |= 0x00000001; + sum_ = value; + onChanged(); + return this; + } + /** + * required int64 sum = 1; + */ + public Builder clearSum() { + bitField0_ = (bitField0_ & ~0x00000001); + sum_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SumResponse) + } + + static { + defaultInstance = new SumResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SumResponse) + } + + /** + * Protobuf service {@code ColumnAggregationServiceWithErrors} + */ + public static abstract class ColumnAggregationServiceWithErrors + implements com.google.protobuf.Service { + protected ColumnAggregationServiceWithErrors() {} + + public interface Interface { + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ + public abstract void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new ColumnAggregationServiceWithErrors() { + @java.lang.Override + public void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request, + com.google.protobuf.RpcCallback done) { + impl.sum(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message 
callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ + public abstract void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + 
switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.SumResponse.getDefaultInstance()); + } + + } + + // 
@@protoc_insertion_point(class_scope:ColumnAggregationServiceWithErrors) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SumRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SumRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SumResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SumResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n)ColumnAggregationWithErrorsProtocol.pr" + + "oto\"/\n\nSumRequest\022\016\n\006family\030\001 \002(\014\022\021\n\tqua" + + "lifier\030\002 \001(\014\"\032\n\013SumResponse\022\013\n\003sum\030\001 \002(\003" + + "2F\n\"ColumnAggregationServiceWithErrors\022 " + + "\n\003sum\022\013.SumRequest\032\014.SumResponseBa\n6org." + + "apache.hadoop.hbase.coprocessor.protobuf" + + ".generatedB!ColumnAggregationWithErrorsP" + + "rotos\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_SumRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_SumRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SumRequest_descriptor, + new java.lang.String[] { "Family", "Qualifier", }); + internal_static_SumResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_SumResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SumResponse_descriptor, + new java.lang.String[] { "Sum", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java new file mode 100644 index 0000000..c0b1917 --- /dev/null +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java @@ -0,0 +1,1270 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: ColumnAggregationNullResponseProtocol.proto + +package org.apache.hadoop.hbase.coprocessor.protobuf.generated; + +public final class ColumnAggregationWithNullResponseProtos { + private ColumnAggregationWithNullResponseProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface SumRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + /** + * required bytes family = 1; + */ + boolean hasFamily(); + /** + * required bytes family = 1; + */ + com.google.protobuf.ByteString getFamily(); + + // optional bytes qualifier = 2; + /** + * optional bytes qualifier = 2; + */ + boolean hasQualifier(); + /** + * optional bytes qualifier = 2; + */ + com.google.protobuf.ByteString getQualifier(); + } + /** + * Protobuf type {@code SumRequest} + */ + public static final class SumRequest extends + com.google.protobuf.GeneratedMessage + implements SumRequestOrBuilder { + // Use SumRequest.newBuilder() to construct. + private SumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SumRequest defaultInstance; + public static SumRequest getDefaultInstance() { + return defaultInstance; + } + + public SumRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + qualifier_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class, 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // optional bytes qualifier = 2; + public static final int QUALIFIER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString qualifier_; + /** + * optional bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, qualifier_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, qualifier_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + 
result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SumRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumRequest_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest 
result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + /** + * required bytes family = 1; + */ + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bytes family = 1; + */ + public com.google.protobuf.ByteString getFamily() { + return family_; + } + /** + * required bytes family = 1; + */ + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + /** + * required bytes family = 1; + */ + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // optional bytes qualifier = 2; + private com.google.protobuf.ByteString qualifier_ = 
com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes qualifier = 2; + */ + public boolean hasQualifier() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional bytes qualifier = 2; + */ + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + /** + * optional bytes qualifier = 2; + */ + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + qualifier_ = value; + onChanged(); + return this; + } + /** + * optional bytes qualifier = 2; + */ + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SumRequest) + } + + static { + defaultInstance = new SumRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SumRequest) + } + + public interface SumResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional int64 sum = 1; + /** + * optional int64 sum = 1; + */ + boolean hasSum(); + /** + * optional int64 sum = 1; + */ + long getSum(); + } + /** + * Protobuf type {@code SumResponse} + */ + public static final class SumResponse extends + com.google.protobuf.GeneratedMessage + implements SumResponseOrBuilder { + // Use SumResponse.newBuilder() to construct. + private SumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SumResponse defaultInstance; + public static SumResponse getDefaultInstance() { + return defaultInstance; + } + + public SumResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SumResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + sum_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SumResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SumResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // optional int64 sum = 1; + public static final int SUM_FIELD_NUMBER = 1; + private long sum_; + /** + * optional int64 sum = 1; + */ + public boolean hasSum() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int64 sum = 1; + */ + public long getSum() { + return sum_; + } + + private void initFields() { + sum_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, sum_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, sum_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse) obj; + + boolean result = true; + result = result && (hasSum() == other.hasSum()); + if (hasSum()) { + result = result && (getSum() + == other.getSum()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasSum()) { + hash = (37 * hash) + SUM_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getSum()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + 
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SumResponse} + */ + public static final 
class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + sum_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_SumResponse_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.sum_ = sum_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse)other); + } 
else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance()) return this; + if (other.hasSum()) { + setSum(other.getSum()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional int64 sum = 1; + private long sum_ ; + /** + * optional int64 sum = 1; + */ + public boolean hasSum() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional int64 sum = 1; + */ + public long getSum() { + return sum_; + } + /** + * optional int64 sum = 1; + */ + public Builder setSum(long value) { + bitField0_ |= 0x00000001; + sum_ = value; + onChanged(); + return this; + } + /** + * optional int64 sum = 1; + */ + public Builder clearSum() { + bitField0_ = (bitField0_ & ~0x00000001); + sum_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SumResponse) + } + + static { + defaultInstance = new SumResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SumResponse) + } + + /** + * Protobuf service {@code ColumnAggregationServiceNullResponse} + */ + public static abstract class ColumnAggregationServiceNullResponse + implements com.google.protobuf.Service { + protected ColumnAggregationServiceNullResponse() {} + + public interface Interface { + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ + public abstract void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new ColumnAggregationServiceNullResponse() { + @java.lang.Override + public void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request, + com.google.protobuf.RpcCallback done) { + impl.sum(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController 
controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc sum(.SumRequest) returns (.SumResponse); + */ + public abstract void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse sum( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse.getDefaultInstance()); + } + + } + + // 
@@protoc_insertion_point(class_scope:ColumnAggregationServiceNullResponse) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SumRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SumRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SumResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SumResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n+ColumnAggregationNullResponseProtocol." + + "proto\"/\n\nSumRequest\022\016\n\006family\030\001 \002(\014\022\021\n\tq" + + "ualifier\030\002 \001(\014\"\032\n\013SumResponse\022\013\n\003sum\030\001 \001" + + "(\0032H\n$ColumnAggregationServiceNullRespon" + + "se\022 \n\003sum\022\013.SumRequest\032\014.SumResponseBg\n6" + + "org.apache.hadoop.hbase.coprocessor.prot" + + "obuf.generatedB\'ColumnAggregationWithNul" + + "lResponseProtos\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_SumRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_SumRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SumRequest_descriptor, + new java.lang.String[] { "Family", "Qualifier", }); + internal_static_SumResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_SumResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SumResponse_descriptor, + new java.lang.String[] { "Sum", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 45b9885..a80b9f2 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -79,6 +79,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceRequest; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiCoprocessorServiceResponse; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse; @@ -560,4 +562,10 @@ 
ClientProtos.ClientService.BlockingInterface, RegionServerServices {
   public ServerNonceManager getNonceManager() {
     return null;
   }
+
+  @Override
+  public MultiCoprocessorServiceResponse execMultiService(RpcController controller,
+      MultiCoprocessorServiceRequest request) throws ServiceException {
+    return null;
+  }
 }
diff --git hbase-server/src/test/protobuf/ColumnAggregationNullResponseProtocol.proto hbase-server/src/test/protobuf/ColumnAggregationNullResponseProtocol.proto
new file mode 100644
index 0000000..39bb05f
--- /dev/null
+++ hbase-server/src/test/protobuf/ColumnAggregationNullResponseProtocol.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "ColumnAggregationWithNullResponseProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message SumRequest {
+  required bytes family = 1;
+  optional bytes qualifier = 2;
+}
+
+message SumResponse {
+  optional int64 sum = 1;
+}
+
+service ColumnAggregationServiceNullResponse {
+  rpc sum(SumRequest) returns(SumResponse);
+}
diff --git hbase-server/src/test/protobuf/ColumnAggregationWithErrorsProtocol.proto hbase-server/src/test/protobuf/ColumnAggregationWithErrorsProtocol.proto
new file mode 100644
index 0000000..c195c33
--- /dev/null
+++ hbase-server/src/test/protobuf/ColumnAggregationWithErrorsProtocol.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "ColumnAggregationWithErrorsProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message SumRequest {
+  required bytes family = 1;
+  optional bytes qualifier = 2;
+}
+
+message SumResponse {
+  required int64 sum = 1;
+}
+
+service ColumnAggregationServiceWithErrors {
+  rpc sum(SumRequest) returns(SumResponse);
+}
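For reference, the following is a minimal sketch (not part of this patch) of how a client might drive the ColumnAggregationServiceNullResponse endpoint through the batch coprocessor call introduced by this change. The table name "testtable", column family "f", qualifier "q", and the example class are hypothetical, and it is assumed here that batchCoprocessorService is parameterized on the response message type so the result map can be written as Map<byte[], SumResponse>, keyed by region name.

import java.util.Map;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumRequest;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.SumResponse;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.protobuf.ByteString;

public class BatchColumnSumExample {
  public static void main(String[] args) throws Throwable {
    // Hypothetical table; assumes a coprocessor endpoint implementing
    // ColumnAggregationServiceNullResponse is loaded on its regions.
    HTable table = new HTable(HBaseConfiguration.create(), TableName.valueOf("testtable"));
    try {
      SumRequest request = SumRequest.newBuilder()
          .setFamily(ByteString.copyFrom(Bytes.toBytes("f")))
          .setQualifier(ByteString.copyFrom(Bytes.toBytes("q")))
          .build();
      // Invocations for regions hosted on the same region server are batched
      // into a single RPC; an empty start/end key selects all table regions.
      Map<byte[], SumResponse> results = table.batchCoprocessorService(
          ColumnAggregationServiceNullResponse.getDescriptor(), "sum", request,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
          SumResponse.getDefaultInstance());
      long total = 0;
      for (SumResponse response : results.values()) {
        // sum is optional, so guard with hasSum() before reading it.
        if (response.hasSum()) {
          total += response.getSum();
        }
      }
      System.out.println("sum = " + total);
    } finally {
      table.close();
    }
  }
}

Because SumResponse declares sum as optional, an endpoint built from this test protocol may return an empty or null response for some regions; callers should therefore not assume every selected region contributes an entry to the result map, which appears to be exactly the behavior the null-response test protocol above is intended to exercise.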