diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 02106d3..2e0c05e 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -678,9 +678,10 @@ public class AggregationClient {
     final AggregateArgument.Builder requestBuilder =
         AggregateArgument.newBuilder();
     requestBuilder.setInterpreterClassName(ci.getClass().getCanonicalName());
-    if (ci.columnInterpreterSpecificData() != null) {
-      requestBuilder.setInterpreterSpecificBytes(
-        ci.columnInterpreterSpecificData());
+    ByteString columnInterpreterSpecificData = null;
+    if ((columnInterpreterSpecificData = ci.columnInterpreterSpecificData())
+        != null) {
+      requestBuilder.setInterpreterSpecificBytes(columnInterpreterSpecificData);
     }
     requestBuilder.setScan(ProtobufUtil.toScan(scan));
     return requestBuilder.build();
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
new file mode 100644
index 0000000..747d511
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client.coprocessor;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
+import org.apache.hadoop.hbase.regionserver.RowProcessor;
+
+import com.google.protobuf.Message;
+/**
+ * Convenience class that is used to make RowProcessorEndpoint invocations.
+ * For example usage, refer to TestRowProcessorEndpoint.
+ *
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class RowProcessorClient {
+  public static <S extends Message, T extends Message>
+  RowProcessorRequest getRowProcessorPB(RowProcessor<S,T> r)
+      throws IOException {
+    final RowProcessorRequest.Builder requestBuilder =
+        RowProcessorRequest.newBuilder();
+    requestBuilder.setRowProcessorClassName(r.getClass().getName());
+    S s = r.getRequestData();
+    if (s != null) {
+      requestBuilder.setRowProcessorInitializerMessageName(s.getClass().getName());
+      requestBuilder.setRowProcessorInitializerMessage(s.toByteString());
+    }
+    return requestBuilder.build();
+  }
+}
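For orientation before the server-side half of the change: the intended client-side flow pairs RowProcessorClient with the generated RowProcessorService stub. The sketch below is illustrative and not part of the patch; it assumes the CoprocessorRpcChannel plumbing from the same endpoint-conversion series (this is essentially what TestRowProcessorEndpoint does), and RowProcessorInvocationSketch and its invoke method are hypothetical names.

    import com.google.protobuf.ByteString;
    import com.google.protobuf.Message;

    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.coprocessor.RowProcessorClient;
    import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
    import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult;
    import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
    import org.apache.hadoop.hbase.regionserver.RowProcessor;

    public class RowProcessorInvocationSketch {
      // Ships the processor's class name (plus optional initializer message) to
      // the region hosting `row` and returns the serialized result it produced.
      // Assumes HTable.coprocessorService(byte[]) from this conversion series.
      public static <S extends Message, T extends Message> ByteString invoke(
          HTable table, byte[] row, RowProcessor<S, T> processor) throws Exception {
        CoprocessorRpcChannel channel = table.coprocessorService(row);
        RowProcessorService.BlockingInterface service =
            RowProcessorService.newBlockingStub(channel);
        RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
        RowProcessorResult result = service.process(null, request);
        return result.getRowProcessorResult();
      }
    }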
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
index 9ee63ff..368f1f7 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
@@ -18,21 +18,35 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult;
+import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RowProcessor;
 
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+
 /**
  * This class demonstrates how to implement atomic read-modify-writes
  * using {@link HRegion#processRowsWithLocks()} and Coprocessor endpoints.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public abstract class BaseRowProcessorEndpoint extends BaseEndpointCoprocessor
-  implements RowProcessorProtocol {
-
+public abstract class BaseRowProcessorEndpoint<S extends Message, T extends Message>
+extends RowProcessorService implements CoprocessorService, Coprocessor {
+  private RegionCoprocessorEnvironment env;
   /**
    * Pass a processor to HRegion to process multiple rows atomically.
    *
@@ -42,16 +56,93 @@ public abstract class BaseRowProcessorEndpoint extends BaseEndpointCoprocessor
    *
    * See {@link TestRowProcessorEndpoint} for example.
    *
-   * @param processor The object defines the read-modify-write procedure
-   * @return The processing result
+   * The request contains information for constructing the processor
+   * (see {@link #constructRowProcessorFromRequest}). The processor object defines
+   * the read-modify-write procedure.
    */
   @Override
-  public <T> T process(RowProcessor<T> processor)
-      throws IOException {
-    HRegion region =
-        ((RegionCoprocessorEnvironment) getEnvironment()).getRegion();
-    region.processRowsWithLocks(processor);
-    return processor.getResult();
+  public void process(RpcController controller, RowProcessorRequest request,
+      RpcCallback<RowProcessorResult> done) {
+    RowProcessorResult resultProto = null;
+    try {
+      RowProcessor<S,T> processor = constructRowProcessorFromRequest(request);
+      HRegion region = env.getRegion();
+      region.processRowsWithLocks(processor);
+      T result = processor.getResult();
+      RowProcessorResult.Builder b = RowProcessorResult.newBuilder();
+      b.setRowProcessorResult(result.toByteString());
+      resultProto = b.build();
+    } catch (Exception e) {
+      ResponseConverter.setControllerException(controller, new IOException(e));
+    }
+    done.run(resultProto);
+  }
+
+  @Override
+  public Service getService() {
+    return this;
+  }
+
+  /**
+   * Stores a reference to the coprocessor environment provided by the
+   * {@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost} from the region where this
+   * coprocessor is loaded.  Since this is a coprocessor endpoint, it always expects to be loaded
+   * on a table region, so always expects this to be an instance of
+   * {@link RegionCoprocessorEnvironment}.
+   * @param env the environment provided by the coprocessor host
+   * @throws IOException if the provided environment is not an instance of
+   * {@code RegionCoprocessorEnvironment}
+   */
+  @Override
+  public void start(CoprocessorEnvironment env) throws IOException {
+    if (env instanceof RegionCoprocessorEnvironment) {
+      this.env = (RegionCoprocessorEnvironment)env;
+    } else {
+      throw new CoprocessorException("Must be loaded on a table region!");
+    }
+  }
+
+  @Override
+  public void stop(CoprocessorEnvironment env) throws IOException {
+    // nothing to do
   }
 
+  @SuppressWarnings("unchecked")
+  RowProcessor<S,T> constructRowProcessorFromRequest(RowProcessorRequest request)
+      throws IOException {
+    String className = request.getRowProcessorClassName();
+    Class<?> cls;
+    try {
+      cls = Class.forName(className);
+      RowProcessor<S,T> ci = (RowProcessor<S,T>) cls.newInstance();
+      if (request.hasRowProcessorInitializerMessageName()) {
+        Class<? extends Message> imn = Class.forName(request.getRowProcessorInitializerMessageName())
+            .asSubclass(Message.class);
+        Method m;
+        try {
+          m = imn.getMethod("parseFrom", ByteString.class);
+        } catch (SecurityException e) {
+          throw new IOException(e);
+        } catch (NoSuchMethodException e) {
+          throw new IOException(e);
+        }
+        S s;
+        try {
+          s = (S)m.invoke(null,request.getRowProcessorInitializerMessage());
+        } catch (IllegalArgumentException e) {
+          throw new IOException(e);
+        } catch (InvocationTargetException e) {
+          throw new IOException(e);
+        }
+        ci.initialize(s);
+      }
+      return ci;
+    } catch (ClassNotFoundException e) {
+      throw new IOException(e);
+    } catch (InstantiationException e) {
+      throw new IOException(e);
+    } catch (IllegalAccessException e) {
+      throw new IOException(e);
+    }
+  }
 }
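The reflective reconstruction in constructRowProcessorFromRequest leans on a protobuf convention: every generated message class exposes a static parseFrom(ByteString) factory, so an initializer message can be rebuilt from nothing but its class name and bytes. A minimal standalone sketch of that dispatch follows (ParseFromReflectionSketch is an illustrative name; RowProcessorRequest, added by this patch, merely stands in for an arbitrary initializer message):

    import java.lang.reflect.Method;

    import com.google.protobuf.ByteString;
    import com.google.protobuf.Message;

    import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest;

    public class ParseFromReflectionSketch {
      public static void main(String[] args) throws Exception {
        // Serialize some message; any generated type would do.
        Message original = RowProcessorRequest.newBuilder()
            .setRowProcessorClassName("example.Processor").build();
        ByteString bytes = original.toByteString();

        // Mirror the endpoint: look the class up by name, then call its
        // static parseFrom(ByteString) factory reflectively.
        Class<? extends Message> cls =
            Class.forName(original.getClass().getName()).asSubclass(Message.class);
        Method parseFrom = cls.getMethod("parseFrom", ByteString.class);
        Message roundTripped = (Message) parseFrom.invoke(null, bytes);

        System.out.println(roundTripped.equals(original));  // expected: true
      }
    }

This also spells out the contract the endpoint imposes on processors: the class named in the request must be loadable on the region server and instantiable via a public no-argument constructor, since it is created with newInstance().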
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
index 8db8093..d74929c 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
@@ -119,7 +119,7 @@ public interface ColumnInterpreter<T, S> {
   /**
    * This method should return any additional data that is needed on the
    * server side to construct the ColumnInterpreter. The server
-   * will pass this to the {@link #initialize(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.ColumnInterpreter)}
+   * will pass this to the {@link #initialize(ByteString)}
    * method. If there is no ColumnInterpreter specific data (for e.g.,
    * {@link LongColumnInterpreter}) then null should be returned.
    * @return the PB message
@@ -161,4 +161,4 @@ public interface ColumnInterpreter<T, S> {
    * @return cast
    */
   T castToCellType(S response);
-}
\ No newline at end of file
+}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RowProcessorProtocol.java hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RowProcessorProtocol.java
deleted file mode 100644
index c670c39..0000000
--- hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RowProcessorProtocol.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.coprocessor;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.RowProcessor;
-
-/**
- * Defines a protocol to perform multi row transactions.
- * See {@link BaseRowProcessorEndpoint} for the implementation.
- * See {@link HRegion#processRowsWithLocks()} for detials.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public interface RowProcessorProtocol extends CoprocessorProtocol {
-
-  /**
-   * @param processor The processor defines how to process the row
-   */
-  <T> T process(RowProcessor<T> processor) throws IOException;
-}
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 41fc4fe..94dab62 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -28,6 +28,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -945,6 +946,20 @@ public final class ProtobufUtil {
     builder.addAllKeyValueBytes(protos);
     return builder.build();
   }
+  /**
+   * Get bytes from ByteString
+   */
+  public static byte[] getBytesFromByteString(ByteString bs) {
+    ByteBuffer bb = bs.asReadOnlyByteBuffer();
+    bb.rewind();
+    byte[] bytes;
+    if (bb.hasArray()) {
+      bytes = bb.array();
+    } else {
+      bytes = bs.toByteArray();
+    }
+    return bytes;
+  }
 
   /**
    * Convert a protocol buffer Result to a client Result
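The new getBytesFromByteString helper tries a zero-copy read of the ByteString's backing array before falling back to a copy. Below is a small sketch of that logic in isolation (illustrative code, not from the patch). Note that by the ByteBuffer.hasArray() contract a read-only buffer reports false, so with asReadOnlyByteBuffer() the bb.array() branch is not expected to be taken and the toByteArray() copy is what actually runs:

    import java.nio.ByteBuffer;

    import com.google.protobuf.ByteString;

    public class ByteStringBytesSketch {
      public static void main(String[] args) throws Exception {
        ByteString bs = ByteString.copyFromUtf8("row-key");

        // Same shape as ProtobufUtil.getBytesFromByteString().
        ByteBuffer bb = bs.asReadOnlyByteBuffer();
        bb.rewind();
        byte[] bytes = bb.hasArray() ? bb.array() : bs.toByteArray();

        System.out.println(bb.hasArray());               // false: buffer is read-only
        System.out.println(new String(bytes, "UTF-8"));  // row-key
      }
    }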
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java
new file mode 100644
index 0000000..18582d9
--- /dev/null
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProcessorProtos.java
@@ -0,0 +1,667 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: MultiRowMutationProcessorMessages.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class MultiRowMutationProcessorProtos {
+  private MultiRowMutationProcessorProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface MultiRowMutationProcessorRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class MultiRowMutationProcessorRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements MultiRowMutationProcessorRequestOrBuilder {
+    // Use MultiRowMutationProcessorRequest.newBuilder() to construct.
+ private MultiRowMutationProcessorRequest(Builder builder) { + super(builder); + } + private MultiRowMutationProcessorRequest(boolean noInit) {} + + private static final MultiRowMutationProcessorRequest defaultInstance; + public static MultiRowMutationProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRowMutationProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest) + } + + static { + defaultInstance = new MultiRowMutationProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest) + } + + public interface MultiRowMutationProcessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class MultiRowMutationProcessorResponse extends + com.google.protobuf.GeneratedMessage + implements MultiRowMutationProcessorResponseOrBuilder { + // Use MultiRowMutationProcessorResponse.newBuilder() to construct. + private MultiRowMutationProcessorResponse(Builder builder) { + super(builder); + } + private MultiRowMutationProcessorResponse(boolean noInit) {} + + private static final MultiRowMutationProcessorResponse defaultInstance; + public static MultiRowMutationProcessorResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiRowMutationProcessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) obj; + + boolean result = true; + result = result && + 
getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse) + } + + static { + defaultInstance = new MultiRowMutationProcessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRowMutationProcessorResponse) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRowMutationProcessorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRowMutationProcessorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\'MultiRowMutationProcessorMessages.prot" + + "o\"\"\n MultiRowMutationProcessorRequest\"#\n" + + "!MultiRowMutationProcessorResponseBR\n*or" + + "g.apache.hadoop.hbase.protobuf.generated" + + "B\037MultiRowMutationProcessorProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_MultiRowMutationProcessorRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + 
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRowMutationProcessorRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class); + internal_static_MultiRowMutationProcessorResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRowMutationProcessorResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java new file mode 100644 index 0000000..162639e --- /dev/null +++ hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java @@ -0,0 +1,1262 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RowProcessor.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class RowProcessorProtos { + private RowProcessorProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RowProcessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string rowProcessorClassName = 1; + boolean hasRowProcessorClassName(); + String getRowProcessorClassName(); + + // optional string rowProcessorInitializerMessageName = 2; + boolean hasRowProcessorInitializerMessageName(); + String getRowProcessorInitializerMessageName(); + + // optional bytes rowProcessorInitializerMessage = 3; + boolean hasRowProcessorInitializerMessage(); + com.google.protobuf.ByteString getRowProcessorInitializerMessage(); + } + public static final class RowProcessorRequest extends + com.google.protobuf.GeneratedMessage + implements RowProcessorRequestOrBuilder { + // Use RowProcessorRequest.newBuilder() to construct. 
+ private RowProcessorRequest(Builder builder) { + super(builder); + } + private RowProcessorRequest(boolean noInit) {} + + private static final RowProcessorRequest defaultInstance; + public static RowProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public RowProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required string rowProcessorClassName = 1; + public static final int ROWPROCESSORCLASSNAME_FIELD_NUMBER = 1; + private java.lang.Object rowProcessorClassName_; + public boolean hasRowProcessorClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getRowProcessorClassName() { + java.lang.Object ref = rowProcessorClassName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + rowProcessorClassName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getRowProcessorClassNameBytes() { + java.lang.Object ref = rowProcessorClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + rowProcessorClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string rowProcessorInitializerMessageName = 2; + public static final int ROWPROCESSORINITIALIZERMESSAGENAME_FIELD_NUMBER = 2; + private java.lang.Object rowProcessorInitializerMessageName_; + public boolean hasRowProcessorInitializerMessageName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getRowProcessorInitializerMessageName() { + java.lang.Object ref = rowProcessorInitializerMessageName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + rowProcessorInitializerMessageName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getRowProcessorInitializerMessageNameBytes() { + java.lang.Object ref = rowProcessorInitializerMessageName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + rowProcessorInitializerMessageName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes rowProcessorInitializerMessage = 3; + public static final int ROWPROCESSORINITIALIZERMESSAGE_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString rowProcessorInitializerMessage_; + public boolean hasRowProcessorInitializerMessage() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getRowProcessorInitializerMessage() { + return rowProcessorInitializerMessage_; + } + + private void initFields() { + rowProcessorClassName_ = ""; + rowProcessorInitializerMessageName_ = ""; + 
rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRowProcessorClassName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getRowProcessorClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getRowProcessorInitializerMessageNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, rowProcessorInitializerMessage_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getRowProcessorClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getRowProcessorInitializerMessageNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, rowProcessorInitializerMessage_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) obj; + + boolean result = true; + result = result && (hasRowProcessorClassName() == other.hasRowProcessorClassName()); + if (hasRowProcessorClassName()) { + result = result && getRowProcessorClassName() + .equals(other.getRowProcessorClassName()); + } + result = result && (hasRowProcessorInitializerMessageName() == other.hasRowProcessorInitializerMessageName()); + if (hasRowProcessorInitializerMessageName()) { + result = result && getRowProcessorInitializerMessageName() + .equals(other.getRowProcessorInitializerMessageName()); + } + result = result && (hasRowProcessorInitializerMessage() == other.hasRowProcessorInitializerMessage()); + if (hasRowProcessorInitializerMessage()) { + result = result && getRowProcessorInitializerMessage() + .equals(other.getRowProcessorInitializerMessage()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRowProcessorClassName()) { + hash = (37 * hash) + ROWPROCESSORCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getRowProcessorClassName().hashCode(); + } + if (hasRowProcessorInitializerMessageName()) { + hash = (37 * hash) + 
ROWPROCESSORINITIALIZERMESSAGENAME_FIELD_NUMBER; + hash = (53 * hash) + getRowProcessorInitializerMessageName().hashCode(); + } + if (hasRowProcessorInitializerMessage()) { + hash = (37 * hash) + ROWPROCESSORINITIALIZERMESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getRowProcessorInitializerMessage().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static 
Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + rowProcessorClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + rowProcessorInitializerMessageName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ 
|= 0x00000001; + } + result.rowProcessorClassName_ = rowProcessorClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.rowProcessorInitializerMessageName_ = rowProcessorInitializerMessageName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.rowProcessorInitializerMessage_ = rowProcessorInitializerMessage_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance()) return this; + if (other.hasRowProcessorClassName()) { + setRowProcessorClassName(other.getRowProcessorClassName()); + } + if (other.hasRowProcessorInitializerMessageName()) { + setRowProcessorInitializerMessageName(other.getRowProcessorInitializerMessageName()); + } + if (other.hasRowProcessorInitializerMessage()) { + setRowProcessorInitializerMessage(other.getRowProcessorInitializerMessage()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRowProcessorClassName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + rowProcessorClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + rowProcessorInitializerMessageName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + rowProcessorInitializerMessage_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string rowProcessorClassName = 1; + private java.lang.Object rowProcessorClassName_ = ""; + public boolean hasRowProcessorClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getRowProcessorClassName() { + java.lang.Object ref = rowProcessorClassName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + rowProcessorClassName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setRowProcessorClassName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + rowProcessorClassName_ = value; + onChanged(); + return this; + } + public Builder clearRowProcessorClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + rowProcessorClassName_ = getDefaultInstance().getRowProcessorClassName(); + 
onChanged(); + return this; + } + void setRowProcessorClassName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + rowProcessorClassName_ = value; + onChanged(); + } + + // optional string rowProcessorInitializerMessageName = 2; + private java.lang.Object rowProcessorInitializerMessageName_ = ""; + public boolean hasRowProcessorInitializerMessageName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getRowProcessorInitializerMessageName() { + java.lang.Object ref = rowProcessorInitializerMessageName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + rowProcessorInitializerMessageName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setRowProcessorInitializerMessageName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + rowProcessorInitializerMessageName_ = value; + onChanged(); + return this; + } + public Builder clearRowProcessorInitializerMessageName() { + bitField0_ = (bitField0_ & ~0x00000002); + rowProcessorInitializerMessageName_ = getDefaultInstance().getRowProcessorInitializerMessageName(); + onChanged(); + return this; + } + void setRowProcessorInitializerMessageName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + rowProcessorInitializerMessageName_ = value; + onChanged(); + } + + // optional bytes rowProcessorInitializerMessage = 3; + private com.google.protobuf.ByteString rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRowProcessorInitializerMessage() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getRowProcessorInitializerMessage() { + return rowProcessorInitializerMessage_; + } + public Builder setRowProcessorInitializerMessage(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + rowProcessorInitializerMessage_ = value; + onChanged(); + return this; + } + public Builder clearRowProcessorInitializerMessage() { + bitField0_ = (bitField0_ & ~0x00000004); + rowProcessorInitializerMessage_ = getDefaultInstance().getRowProcessorInitializerMessage(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RowProcessorRequest) + } + + static { + defaultInstance = new RowProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowProcessorRequest) + } + + public interface RowProcessorResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes rowProcessorResult = 1; + boolean hasRowProcessorResult(); + com.google.protobuf.ByteString getRowProcessorResult(); + } + public static final class RowProcessorResult extends + com.google.protobuf.GeneratedMessage + implements RowProcessorResultOrBuilder { + // Use RowProcessorResult.newBuilder() to construct. 
+ private RowProcessorResult(Builder builder) { + super(builder); + } + private RowProcessorResult(boolean noInit) {} + + private static final RowProcessorResult defaultInstance; + public static RowProcessorResult getDefaultInstance() { + return defaultInstance; + } + + public RowProcessorResult getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable; + } + + private int bitField0_; + // required bytes rowProcessorResult = 1; + public static final int ROWPROCESSORRESULT_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString rowProcessorResult_; + public boolean hasRowProcessorResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRowProcessorResult() { + return rowProcessorResult_; + } + + private void initFields() { + rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRowProcessorResult()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, rowProcessorResult_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, rowProcessorResult_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) obj; + + boolean result = true; + result = result && (hasRowProcessorResult() == other.hasRowProcessorResult()); + if (hasRowProcessorResult()) { + result = result && getRowProcessorResult() + .equals(other.getRowProcessorResult()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRowProcessorResult()) { + hash = (37 * hash) + ROWPROCESSORRESULT_FIELD_NUMBER; + hash = (53 * hash) + getRowProcessorResult().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult build() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rowProcessorResult_ = rowProcessorResult_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()) return this; + if (other.hasRowProcessorResult()) { + setRowProcessorResult(other.getRowProcessorResult()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRowProcessorResult()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + rowProcessorResult_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes rowProcessorResult = 1; + private com.google.protobuf.ByteString rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRowProcessorResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRowProcessorResult() { + return rowProcessorResult_; + } + public Builder setRowProcessorResult(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + rowProcessorResult_ = value; + onChanged(); + return this; + } + public Builder clearRowProcessorResult() { + bitField0_ = (bitField0_ & ~0x00000001); + rowProcessorResult_ = getDefaultInstance().getRowProcessorResult(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RowProcessorResult) + } + + static { + defaultInstance = new RowProcessorResult(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowProcessorResult) + } + + public static abstract class RowProcessorService + implements com.google.protobuf.Service { + protected RowProcessorService() {} + + public interface Interface { + public abstract void process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new RowProcessorService() { + @java.lang.Override + public void process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request, + com.google.protobuf.RpcCallback done) { + impl.process(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + 
com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final 
com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RowProcessorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RowProcessorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RowProcessorResult_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RowProcessorResult_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + 
getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022RowProcessor.proto\"\210\001\n\023RowProcessorReq" + + "uest\022\035\n\025rowProcessorClassName\030\001 \002(\t\022*\n\"r" + + "owProcessorInitializerMessageName\030\002 \001(\t\022" + + "&\n\036rowProcessorInitializerMessage\030\003 \001(\014\"" + + "0\n\022RowProcessorResult\022\032\n\022rowProcessorRes" + + "ult\030\001 \002(\0142K\n\023RowProcessorService\0224\n\007proc" + + "ess\022\024.RowProcessorRequest\032\023.RowProcessor" + + "ResultBH\n*org.apache.hadoop.hbase.protob" + + "uf.generatedB\022RowProcessorProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RowProcessorRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RowProcessorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RowProcessorRequest_descriptor, + new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", }, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class); + internal_static_RowProcessorResult_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_RowProcessorResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RowProcessorResult_descriptor, + new java.lang.String[] { "RowProcessorResult", }, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class, + org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java index f524374..174af98 100644 --- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java +++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java @@ -23,15 +23,13 @@ import java.util.UUID; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import com.google.protobuf.Message; + /** * Base class for RowProcessor with some default implementations. 
 */
-public abstract class BaseRowProcessor<T> implements RowProcessor<T> {
-
-  @Override
-  public T getResult() {
-    return null;
-  }
+public abstract class BaseRowProcessor<S extends Message, T extends Message>
+implements RowProcessor<S, T> {

   @Override
   public void preProcess(HRegion region, WALEdit walEdit) throws IOException {
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 412abf6..c70e9ab 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -4302,7 +4302,7 @@ public class HRegion implements HeapSize { // , Writable{
    *
    * @param processor The object defines the reads and writes to a row.
    */
-  public void processRowsWithLocks(RowProcessor<?> processor)
+  public void processRowsWithLocks(RowProcessor<?,?> processor)
       throws IOException {
     processRowsWithLocks(processor, rowProcessorTimeout);
   }
@@ -4314,7 +4314,7 @@ public class HRegion implements HeapSize { // , Writable{
    * @param timeout The timeout of the processor.process() execution
    *   Use a negative number to switch off the time bound
    */
-  public void processRowsWithLocks(RowProcessor<?> processor, long timeout)
+  public void processRowsWithLocks(RowProcessor<?,?> processor, long timeout)
       throws IOException {

     for (byte[] row : processor.getRowsToLock()) {
@@ -4453,7 +4453,7 @@ public class HRegion implements HeapSize { // , Writable{
     }
   }

-  private void doProcessRowWithTimeout(final RowProcessor<?> processor,
+  private void doProcessRowWithTimeout(final RowProcessor<?,?> processor,
       final long now,
       final HRegion region,
       final List<Mutation> mutations,
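For orientation, a minimal sketch (not part of the patch) of how a caller inside the region server drives the reworked API once both type parameters are in place; RowProcessorUsageSketch and its run() method are hypothetical, standing in for any concrete processor with generated request/response message types:

import java.io.IOException;

import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RowProcessor;

import com.google.protobuf.Message;

public class RowProcessorUsageSketch {
  // S binds the processor's request message type, T its result message type.
  static <S extends Message, T extends Message> T run(
      HRegion region, RowProcessor<S, T> processor) throws IOException {
    region.processRowsWithLocks(processor); // takes the row locks, runs process()
    return processor.getResult();           // typed protobuf result
  }
}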
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
index c962bef..4bea011 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
@@ -27,13 +27,16 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;

 /**
  * A MultiRowProcessor that performs multiple puts and deletes.
  */
-class MultiRowMutationProcessor extends BaseRowProcessor<Void> {
+class MultiRowMutationProcessor extends BaseRowProcessor<MultiRowMutationProcessorRequest, MultiRowMutationProcessorResponse> {
   Collection<byte[]> rowsToLock;
   Collection<Mutation> mutations;
@@ -52,6 +55,11 @@ class MultiRowMutationProcessor extends BaseRowProcessor<Void> {
   public boolean readOnly() {
     return false;
   }
+
+  @Override
+  public MultiRowMutationProcessorResponse getResult() {
+    return MultiRowMutationProcessorResponse.getDefaultInstance();
+  }

   @Override
   public void process(long now,
@@ -123,4 +131,13 @@ class MultiRowMutationProcessor extends BaseRowProcessor<Void> {
     }
   }

+  @Override
+  public MultiRowMutationProcessorRequest getRequestData() {
+    return MultiRowMutationProcessorRequest.getDefaultInstance();
+  }
+
+  @Override
+  public void initialize(MultiRowMutationProcessorRequest msg) {
+    //nothing
+  }
 }
diff --git hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java
index 4be0cd3..0c50496 100644
--- hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java
+++ hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java
@@ -27,6 +27,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
@@ -38,10 +41,12 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
  * This class performs scans and generates mutations and WAL edits.
  * The locks and MVCC will be handled by HRegion.
  *
- * The generic type parameter T is the return type of
- * RowProcessor.getResult().
+ * The RowProcessor user code could have data that needs to be
+ * sent across for proper initialization at the server side. The generic type
+ * parameter S is the type of the request data sent to the server.
+ * The generic type parameter T is the return type of RowProcessor.getResult().
  */
-public interface RowProcessor<T> {
+public interface RowProcessor<S extends Message, T extends Message> {

   /**
    * Rows to lock while operation.
@@ -51,7 +56,9 @@ public interface RowProcessor<T> {
   Collection<byte[]> getRowsToLock();

   /**
-   * Obtain the processing result
+   * Obtain the processing result. All row processor implementations must
+   * implement this, even if the method is simply returning an empty
+   * Message.
    */
   T getResult();

@@ -108,4 +115,22 @@ public interface RowProcessor<T> {
    * @return The name of the processor
    */
   String getName();
-}
+
+  /**
+   * This method should return any additional data that is needed on the
+   * server side to construct the RowProcessor. The server will pass this to
+   * the {@link #initialize(ByteString)} method. If there is no RowProcessor
+   * specific data then null should be returned.
+   * @return the PB message
+   * @throws IOException
+   */
+  S getRequestData() throws IOException;
+
+  /**
+   * This method should initialize any field(s) of the RowProcessor with
+   * a parsing of the passed message bytes (used on the server side).
+ * @param msg + * @throws IOException + */ + void initialize(S msg) throws IOException; +} \ No newline at end of file diff --git hbase-server/src/main/protobuf/MultiRowMutationProcessorMessages.proto hbase-server/src/main/protobuf/MultiRowMutationProcessorMessages.proto new file mode 100644 index 0000000..cca5f55 --- /dev/null +++ hbase-server/src/main/protobuf/MultiRowMutationProcessorMessages.proto @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines a protocol to perform multi row transactions. + * See BaseRowProcessorEndpoint for the implementation. + * See HRegion#processRowsWithLocks() for details. + */ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "MultiRowMutationProcessorProtos"; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message MultiRowMutationProcessorRequest{ +} + +message MultiRowMutationProcessorResponse{ +} \ No newline at end of file diff --git hbase-server/src/main/protobuf/RowProcessor.proto hbase-server/src/main/protobuf/RowProcessor.proto new file mode 100644 index 0000000..ff2588a --- /dev/null +++ hbase-server/src/main/protobuf/RowProcessor.proto @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines a protocol to perform multi row transactions. + * See BaseRowProcessorEndpoint for the implementation. + * See HRegion#processRowsWithLocks() for details. 
+ */ +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "RowProcessorProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message RowProcessorRequest { + required string rowProcessorClassName = 1; + optional string rowProcessorInitializerMessageName = 2; + optional bytes rowProcessorInitializerMessage = 3; +} + +message RowProcessorResult { + required bytes rowProcessorResult = 1; +} + +service RowProcessorService { + rpc process (RowProcessorRequest) returns (RowProcessorResult); +} \ No newline at end of file diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index 426a586..41dd6b7 100644 --- hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.coprocessor; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import java.io.DataInput; -import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -38,27 +36,39 @@ import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.coprocessor.RowProcessorClient; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse; +import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest; +import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult; +import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService; import org.apache.hadoop.hbase.regionserver.BaseRowProcessor; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.InternalScanner; import 
org.apache.hadoop.hbase.regionserver.wal.HLog;
-import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
 import com.sun.org.apache.commons.logging.Log;
 import com.sun.org.apache.commons.logging.LogFactory;

@@ -100,7 +110,7 @@ public class TestRowProcessorEndpoint {
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
     Configuration conf = util.getConfiguration();
-    conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
         RowProcessorEndpoint.class.getName());
     conf.setInt("hbase.client.retries.number", 1);
     conf.setLong("hbase.hregion.row.processor.timeout", 1000L);
@@ -138,12 +148,18 @@
   @Test
   public void testDoubleScan() throws Throwable {
     prepareTestData();
-    RowProcessorProtocol protocol =
-        table.coprocessorProxy(RowProcessorProtocol.class, ROW);
+
+    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.FriendsOfFriendsProcessor processor =
         new RowProcessorEndpoint.FriendsOfFriendsProcessor(ROW, A);
-    Set<String> result = protocol.process(processor);
-
+    RowProcessorService.BlockingInterface service =
+        RowProcessorService.newBlockingStub(channel);
+    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    RowProcessorResult protoResult = service.process(null, request);
+    FriendsOfFriendsProcessorResponse response =
+        FriendsOfFriendsProcessorResponse.parseFrom(protoResult.getRowProcessorResult());
+    Set<String> result = new HashSet<String>();
+    result.addAll(response.getResultList());
     Set<String> expected =
         new HashSet<String>(Arrays.asList(new String[]{"d", "e", "f", "g"}));
     Get get = new Get(ROW);
@@ -176,12 +192,17 @@
   }

   private int incrementCounter(HTable table) throws Throwable {
-    RowProcessorProtocol protocol =
-        table.coprocessorProxy(RowProcessorProtocol.class, ROW);
+    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.IncrementCounterProcessor processor =
         new RowProcessorEndpoint.IncrementCounterProcessor(ROW);
-    int counterValue = protocol.process(processor);
-    return counterValue;
+    RowProcessorService.BlockingInterface service =
+        RowProcessorService.newBlockingStub(channel);
+    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
+    RowProcessorResult protoResult = service.process(null, request);
+    IncCounterProcessorResponse response = IncCounterProcessorResponse
+        .parseFrom(protoResult.getRowProcessorResult());
+    Integer result = response.getResponse();
+    return result;
   }

   private void concurrentExec(
@@ -234,23 +255,27 @@
   }

   private void swapRows(HTable table) throws Throwable {
-    RowProcessorProtocol protocol =
-        table.coprocessorProxy(RowProcessorProtocol.class, ROW);
+    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.RowSwapProcessor processor =
         new RowProcessorEndpoint.RowSwapProcessor(ROW, ROW2);
-    protocol.process(processor);
+    RowProcessorService.BlockingInterface service =
+        RowProcessorService.newBlockingStub(channel);
+    RowProcessorRequest request =
+        RowProcessorClient.getRowProcessorPB(processor);
+    service.process(null, request);
   }
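The helpers above (testDoubleScan, incrementCounter, swapRows) all repeat the same channel/stub/request plumbing; as a reading aid, the shared pattern can be captured in one hypothetical generic method (not part of the patch; it relies only on the imports already added above):

  private <S extends Message, T extends Message> RowProcessorResult callRowProcessor(
      HTable table, byte[] row, RowProcessor<S, T> processor) throws Exception {
    CoprocessorRpcChannel channel = table.coprocessorService(row);
    RowProcessorService.BlockingInterface service =
        RowProcessorService.newBlockingStub(channel);
    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
    // The result carries opaque bytes; each caller parses its own response type.
    return service.process(null, request);
  }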
   @Test
   public void testTimeout() throws Throwable {
     prepareTestData();
-    RowProcessorProtocol protocol =
-        table.coprocessorProxy(RowProcessorProtocol.class, ROW);
+    CoprocessorRpcChannel channel = table.coprocessorService(ROW);
     RowProcessorEndpoint.TimeoutProcessor processor =
         new RowProcessorEndpoint.TimeoutProcessor(ROW);
+    RowProcessorService.BlockingInterface service =
+        RowProcessorService.newBlockingStub(channel);
+    RowProcessorRequest request = RowProcessorClient.getRowProcessorPB(processor);
     boolean exceptionCaught = false;
     try {
-      protocol.process(processor);
+      service.process(null, request);
     } catch (Exception e) {
       exceptionCaught = true;
     }
@@ -264,11 +289,11 @@
    * We define the RowProcessors as the inner class of the endpoint.
    * So they can be loaded with the endpoint on the coprocessor.
    */
-  public static class RowProcessorEndpoint extends BaseRowProcessorEndpoint
-  implements RowProcessorProtocol {
-
+  public static class RowProcessorEndpoint<S extends Message, T extends Message>
+  extends BaseRowProcessorEndpoint<S, T> implements CoprocessorService {
     public static class IncrementCounterProcessor extends
-        BaseRowProcessor<Integer> implements Writable {
+        BaseRowProcessor<IncCounterProcessorRequest, IncCounterProcessorResponse> {
       int counter = 0;
       byte[] row = new byte[0];
@@ -288,8 +313,10 @@
       }

       @Override
-      public Integer getResult() {
-        return counter;
+      public IncCounterProcessorResponse getResult() {
+        IncCounterProcessorResponse.Builder i = IncCounterProcessorResponse.newBuilder();
+        i.setResponse(counter);
+        return i.build();
       }

       @Override
@@ -330,21 +357,22 @@
       }

       @Override
-      public void readFields(DataInput in) throws IOException {
-        this.row = Bytes.readByteArray(in);
-        this.counter = in.readInt();
+      public IncCounterProcessorRequest getRequestData() throws IOException {
+        IncCounterProcessorRequest.Builder builder = IncCounterProcessorRequest.newBuilder();
+        builder.setCounter(counter);
+        builder.setRow(ByteString.copyFrom(row));
+        return builder.build();
       }

       @Override
-      public void write(DataOutput out) throws IOException {
-        Bytes.writeByteArray(out, row);
-        out.writeInt(counter);
+      public void initialize(IncCounterProcessorRequest msg) {
+        this.row = msg.getRow().toByteArray();
+        this.counter = msg.getCounter();
+      }
-    }
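Each processor's getRequestData()/initialize() pair only makes sense together with the server-side reconstruction step, which is not visible in this excerpt. A hedged sketch of what the endpoint plausibly does with an incoming RowProcessorRequest; the method name, error handling, and the no-arg-constructor assumption are illustrative:

  // Illustrative only: rebuild the processor by class name, then re-parse the
  // initializer message via the generated static parseFrom(ByteString) method.
  @SuppressWarnings("unchecked")
  static <S extends Message, T extends Message> RowProcessor<S, T> buildProcessor(
      RowProcessorRequest request) throws Exception {
    RowProcessor<S, T> processor = (RowProcessor<S, T>)
        Class.forName(request.getRowProcessorClassName()).newInstance();
    if (request.hasRowProcessorInitializerMessageName()) {
      Class<?> protoClass =
          Class.forName(request.getRowProcessorInitializerMessageName());
      java.lang.reflect.Method parseFrom =
          protoClass.getMethod("parseFrom", ByteString.class);
      processor.initialize((S) parseFrom.invoke(null,
          request.getRowProcessorInitializerMessage()));
    }
    return processor;
  }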
     public static class FriendsOfFriendsProcessor extends
-        BaseRowProcessor<Set<String>> implements Writable {
+        BaseRowProcessor<FriendsOfFriendsProcessorRequest, FriendsOfFriendsProcessorResponse> {
       byte[] row = null;
       byte[] person = null;
       final Set<String> result = new HashSet<String>();
@@ -366,8 +394,11 @@
       }

       @Override
-      public Set<String> getResult() {
-        return result;
+      public FriendsOfFriendsProcessorResponse getResult() {
+        FriendsOfFriendsProcessorResponse.Builder builder =
+            FriendsOfFriendsProcessorResponse.newBuilder();
+        builder.addAllResult(result);
+        return builder.build();
       }

       @Override
@@ -405,29 +436,28 @@
       }

       @Override
-      public void readFields(DataInput in) throws IOException {
-        this.person = Bytes.readByteArray(in);
-        this.row = Bytes.readByteArray(in);
-        int size = in.readInt();
-        result.clear();
-        for (int i = 0; i < size; ++i) {
-          result.add(Text.readString(in));
-        }
+      public FriendsOfFriendsProcessorRequest getRequestData() throws IOException {
+        FriendsOfFriendsProcessorRequest.Builder builder =
+            FriendsOfFriendsProcessorRequest.newBuilder();
+        builder.setPerson(ByteString.copyFrom(person));
+        builder.setRow(ByteString.copyFrom(row));
+        builder.addAllResult(result);
+        FriendsOfFriendsProcessorRequest f = builder.build();
+        return f;
       }

       @Override
-      public void write(DataOutput out) throws IOException {
-        Bytes.writeByteArray(out, person);
-        Bytes.writeByteArray(out, row);
-        out.writeInt(result.size());
-        for (String s : result) {
-          Text.writeString(out, s);
-        }
+      public void initialize(FriendsOfFriendsProcessorRequest request)
+          throws IOException {
+        this.person = request.getPerson().toByteArray();
+        this.row = request.getRow().toByteArray();
+        result.clear();
+        result.addAll(request.getResultList());
       }
     }

     public static class RowSwapProcessor extends
-        BaseRowProcessor<Set<String>> implements Writable {
+        BaseRowProcessor<RowSwapProcessorRequest, RowSwapProcessorResponse> {
       byte[] row1 = new byte[0];
       byte[] row2 = new byte[0];
@@ -456,6 +486,11 @@
       }

       @Override
+      public RowSwapProcessorResponse getResult() {
+        return RowSwapProcessorResponse.getDefaultInstance();
+      }
+
+      @Override
       public void process(long now, HRegion region,
           List<Mutation> mutations, WALEdit walEdit) throws IOException {
@@ -502,25 +537,27 @@
       }

       @Override
-      public void readFields(DataInput in) throws IOException {
-        this.row1 = Bytes.readByteArray(in);
-        this.row2 = Bytes.readByteArray(in);
+      public String getName() {
+        return "swap";
       }

       @Override
-      public void write(DataOutput out) throws IOException {
-        Bytes.writeByteArray(out, row1);
-        Bytes.writeByteArray(out, row2);
+      public RowSwapProcessorRequest getRequestData() throws IOException {
+        RowSwapProcessorRequest.Builder builder = RowSwapProcessorRequest.newBuilder();
+        builder.setRow1(ByteString.copyFrom(row1));
+        builder.setRow2(ByteString.copyFrom(row2));
+        return builder.build();
       }

       @Override
-      public String getName() {
-        return "swap";
+      public void initialize(RowSwapProcessorRequest msg) {
+        this.row1 = msg.getRow1().toByteArray();
+        this.row2 = msg.getRow2().toByteArray();
       }
     }

     public static class TimeoutProcessor extends
-        BaseRowProcessor implements Writable {
+        BaseRowProcessor<TimeoutProcessorRequest, TimeoutProcessorResponse> {
       byte[] row = new byte[0];
@@ -539,6 +576,11 @@
       }

       @Override
+      public TimeoutProcessorResponse getResult() {
+        return TimeoutProcessorResponse.getDefaultInstance();
+      }
+
+      @Override
       public void process(long now, HRegion region,
           List<Mutation> mutations, WALEdit walEdit) throws IOException {
         try {
@@ -555,18 +597,20 @@
       }

       @Override
-      public void readFields(DataInput in) throws IOException {
-        this.row = Bytes.readByteArray(in);
+      public String getName() {
+        return "timeout";
       }

       @Override
-      public void write(DataOutput out) throws IOException {
-        Bytes.writeByteArray(out, row);
+      public TimeoutProcessorRequest getRequestData() throws IOException {
+        TimeoutProcessorRequest.Builder builder = TimeoutProcessorRequest.newBuilder();
+        builder.setRow(ByteString.copyFrom(row));
+        return builder.build();
       }

       @Override
-      public String getName() {
-        return "timeout";
+      public void initialize(TimeoutProcessorRequest msg) throws IOException {
+        this.row = msg.getRow().toByteArray();
       }
     }
diff --git hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java
new file mode 100644
index 0000000..e46cdc1
--- /dev/null
+++ hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java
@@ -0,0 +1,3425 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: IncrementCounterProcessor.proto + +package org.apache.hadoop.hbase.coprocessor.protobuf.generated; + +public final class IncrementCounterProcessorTestProtos { + private IncrementCounterProcessorTestProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface IncCounterProcessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required int32 counter = 2; + boolean hasCounter(); + int getCounter(); + } + public static final class IncCounterProcessorRequest extends + com.google.protobuf.GeneratedMessage + implements IncCounterProcessorRequestOrBuilder { + // Use IncCounterProcessorRequest.newBuilder() to construct. + private IncCounterProcessorRequest(Builder builder) { + super(builder); + } + private IncCounterProcessorRequest(boolean noInit) {} + + private static final IncCounterProcessorRequest defaultInstance; + public static IncCounterProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public IncCounterProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required int32 counter = 2; + public static final int COUNTER_FIELD_NUMBER = 2; + private int counter_; + public boolean hasCounter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getCounter() { + return counter_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + counter_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCounter()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeInt32(2, counter_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, counter_); + } + size += 
getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasCounter() == other.hasCounter()); + if (hasCounter()) { + result = result && (getCounter() + == other.getCounter()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasCounter()) { + hash = (37 * hash) + COUNTER_FIELD_NUMBER; + hash = (53 * hash) + getCounter(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + counter_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + 
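+      // Editorial sketch, not generated output: a typical client-side
+      // construction of this request; the row key and counter value are
+      // illustrative only.
+      //
+      //   IncCounterProcessorRequest request = IncCounterProcessorRequest
+      //       .newBuilder()
+      //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("testrow"))
+      //       .setCounter(1)
+      //       .build();
+      //
+      // Both fields are declared required, so build() throws an
+      // UninitializedMessageException if either setter was skipped.
+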
public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.counter_ = counter_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasCounter()) { + setCounter(other.getCounter()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasCounter()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + counter_ = input.readInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required int32 counter = 2; + private int counter_ ; + public boolean hasCounter() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getCounter() { + return counter_; + } + public Builder setCounter(int value) { + bitField0_ |= 0x00000002; + counter_ = value; + onChanged(); + return this; + } + public Builder clearCounter() { + bitField0_ = (bitField0_ & ~0x00000002); + counter_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:IncCounterProcessorRequest) + } + + static { + defaultInstance = new IncCounterProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IncCounterProcessorRequest) + } + + public interface IncCounterProcessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int32 response = 1; + boolean hasResponse(); + int getResponse(); + } + public static final class IncCounterProcessorResponse extends + com.google.protobuf.GeneratedMessage + implements IncCounterProcessorResponseOrBuilder { + // Use IncCounterProcessorResponse.newBuilder() to construct. 
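+    // Editorial note, not generated output: the constructor below is kept
+    // private so instances can only be produced through the Builder, whose
+    // build() path enforces the required-field check in isInitialized().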
+ private IncCounterProcessorResponse(Builder builder) { + super(builder); + } + private IncCounterProcessorResponse(boolean noInit) {} + + private static final IncCounterProcessorResponse defaultInstance; + public static IncCounterProcessorResponse getDefaultInstance() { + return defaultInstance; + } + + public IncCounterProcessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable; + } + + private int bitField0_; + // required int32 response = 1; + public static final int RESPONSE_FIELD_NUMBER = 1; + private int response_; + public boolean hasResponse() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getResponse() { + return response_; + } + + private void initFields() { + response_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasResponse()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, response_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, response_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) obj; + + boolean result = true; + result = result && (hasResponse() == other.hasResponse()); + if (hasResponse()) { + result = result && (getResponse() + == other.getResponse()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResponse()) { + hash = (37 * hash) + RESPONSE_FIELD_NUMBER; + hash = (53 * hash) + getResponse(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + 
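+    // Editorial sketch, not generated output: callers usually decode the
+    // endpoint result bytes with one of the parseFrom() overloads that
+    // follow; the variable names here are illustrative.
+    //
+    //   IncCounterProcessorResponse resp =
+    //       IncCounterProcessorResponse.parseFrom(resultBytes);
+    //   int newCounterValue = resp.getResponse();
+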
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + response_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse buildPartial() { + 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.response_ = response_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance()) return this; + if (other.hasResponse()) { + setResponse(other.getResponse()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasResponse()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + response_ = input.readInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required int32 response = 1; + private int response_ ; + public boolean hasResponse() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getResponse() { + return response_; + } + public Builder setResponse(int value) { + bitField0_ |= 0x00000001; + response_ = value; + onChanged(); + return this; + } + public Builder clearResponse() { + bitField0_ = (bitField0_ & ~0x00000001); + response_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:IncCounterProcessorResponse) + } + + static { + defaultInstance = new IncCounterProcessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:IncCounterProcessorResponse) + } + + public interface FriendsOfFriendsProcessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes person = 1; + boolean hasPerson(); + com.google.protobuf.ByteString getPerson(); + + // required bytes row = 2; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // repeated string result = 3; + java.util.List getResultList(); + int getResultCount(); + String getResult(int index); + } + public static final class FriendsOfFriendsProcessorRequest extends + com.google.protobuf.GeneratedMessage + 
implements FriendsOfFriendsProcessorRequestOrBuilder { + // Use FriendsOfFriendsProcessorRequest.newBuilder() to construct. + private FriendsOfFriendsProcessorRequest(Builder builder) { + super(builder); + } + private FriendsOfFriendsProcessorRequest(boolean noInit) {} + + private static final FriendsOfFriendsProcessorRequest defaultInstance; + public static FriendsOfFriendsProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes person = 1; + public static final int PERSON_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString person_; + public boolean hasPerson() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPerson() { + return person_; + } + + // required bytes row = 2; + public static final int ROW_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // repeated string result = 3; + public static final int RESULT_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList result_; + public java.util.List + getResultList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public String getResult(int index) { + return result_.get(index); + } + + private void initFields() { + person_ = com.google.protobuf.ByteString.EMPTY; + row_ = com.google.protobuf.ByteString.EMPTY; + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPerson()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, person_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, row_); + } + for (int i = 0; i < result_.size(); i++) { + output.writeBytes(3, result_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, person_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, row_); + } + { + int dataSize = 0; + for (int i = 0; i < result_.size(); i++) { + dataSize += 
com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(result_.getByteString(i)); + } + size += dataSize; + size += 1 * getResultList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) obj; + + boolean result = true; + result = result && (hasPerson() == other.hasPerson()); + if (hasPerson()) { + result = result && getPerson() + .equals(other.getPerson()); + } + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPerson()) { + hash = (37 * hash) + PERSON_FIELD_NUMBER; + hash = (53 * hash) + getPerson().hashCode(); + } + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private 
Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + person_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.person_ = person_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.row_ = row_; + if (((bitField0_ & 0x00000004) == 0x00000004)) { + result_ = new com.google.protobuf.UnmodifiableLazyStringList( + result_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.result_ = result_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance()) return this; + if (other.hasPerson()) { + setPerson(other.getPerson()); + } + if (other.hasRow()) { + setRow(other.getRow()); + } + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPerson()) { + + return false; + } + if (!hasRow()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + person_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + row_ = input.readBytes(); + break; + } + case 26: { + ensureResultIsMutable(); + result_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes person = 1; + private com.google.protobuf.ByteString person_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasPerson() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getPerson() { + return person_; + } + public Builder setPerson(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + person_ = value; + onChanged(); + return this; + } + public Builder clearPerson() { + bitField0_ = (bitField0_ & ~0x00000001); + person_ = getDefaultInstance().getPerson(); + onChanged(); + return this; + } + + // required bytes row = 2; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000002); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // repeated string result = 3; + private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + result_ = new com.google.protobuf.LazyStringArrayList(result_); + bitField0_ |= 0x00000004; + } + } + public java.util.List + getResultList() { + return 
java.util.Collections.unmodifiableList(result_); + } + public int getResultCount() { + return result_.size(); + } + public String getResult(int index) { + return result_.get(index); + } + public Builder setResult( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + return this; + } + public Builder addResult(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + return this; + } + public Builder clearResult() { + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + void addResult(com.google.protobuf.ByteString value) { + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorRequest) + } + + static { + defaultInstance = new FriendsOfFriendsProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorRequest) + } + + public interface FriendsOfFriendsProcessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated string result = 1; + java.util.List getResultList(); + int getResultCount(); + String getResult(int index); + } + public static final class FriendsOfFriendsProcessorResponse extends + com.google.protobuf.GeneratedMessage + implements FriendsOfFriendsProcessorResponseOrBuilder { + // Use FriendsOfFriendsProcessorResponse.newBuilder() to construct. 
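+    // Editorial note, not generated output: the repeated "result" field is
+    // populated via Builder.addResult(String) or addAllResult(...); on the
+    // built message, getResultList() returns an unmodifiable list.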
+ private FriendsOfFriendsProcessorResponse(Builder builder) { + super(builder); + } + private FriendsOfFriendsProcessorResponse(boolean noInit) {} + + private static final FriendsOfFriendsProcessorResponse defaultInstance; + public static FriendsOfFriendsProcessorResponse getDefaultInstance() { + return defaultInstance; + } + + public FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable; + } + + // repeated string result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList result_; + public java.util.List + getResultList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public String getResult(int index) { + return result_.get(index); + } + + private void initFields() { + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeBytes(1, result_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < result_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(result_.getByteString(i)); + } + size += dataSize; + size += 1 * getResultList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + 
RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new com.google.protobuf.UnmodifiableLazyStringList( + result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance()) return this; + if (!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureResultIsMutable(); + result_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated string result = 1; + private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new com.google.protobuf.LazyStringArrayList(result_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getResultList() { + return java.util.Collections.unmodifiableList(result_); + } + public int getResultCount() { + return result_.size(); + } + public String getResult(int index) { + return result_.get(index); + } + public Builder setResult( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + return this; + } + public Builder addResult(String 
value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + return this; + } + public Builder clearResult() { + result_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + void addResult(com.google.protobuf.ByteString value) { + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorResponse) + } + + static { + defaultInstance = new FriendsOfFriendsProcessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorResponse) + } + + public interface RowSwapProcessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row1 = 1; + boolean hasRow1(); + com.google.protobuf.ByteString getRow1(); + + // required bytes row2 = 2; + boolean hasRow2(); + com.google.protobuf.ByteString getRow2(); + } + public static final class RowSwapProcessorRequest extends + com.google.protobuf.GeneratedMessage + implements RowSwapProcessorRequestOrBuilder { + // Use RowSwapProcessorRequest.newBuilder() to construct. + private RowSwapProcessorRequest(Builder builder) { + super(builder); + } + private RowSwapProcessorRequest(boolean noInit) {} + + private static final RowSwapProcessorRequest defaultInstance; + public static RowSwapProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public RowSwapProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row1 = 1; + public static final int ROW1_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row1_; + public boolean hasRow1() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow1() { + return row1_; + } + + // required bytes row2 = 2; + public static final int ROW2_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString row2_; + public boolean hasRow2() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRow2() { + return row2_; + } + + private void initFields() { + row1_ = com.google.protobuf.ByteString.EMPTY; + row2_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow1()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRow2()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + output.writeBytes(1, row1_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, row2_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row1_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, row2_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) obj; + + boolean result = true; + result = result && (hasRow1() == other.hasRow1()); + if (hasRow1()) { + result = result && getRow1() + .equals(other.getRow1()); + } + result = result && (hasRow2() == other.hasRow2()); + if (hasRow2()) { + result = result && getRow2() + .equals(other.getRow2()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow1()) { + hash = (37 * hash) + ROW1_FIELD_NUMBER; + hash = (53 * hash) + getRow1().hashCode(); + } + if (hasRow2()) { + hash = (37 * hash) + ROW2_FIELD_NUMBER; + hash = (53 * hash) + getRow2().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) 
+ .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.newBuilder() + private Builder() { + 
maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row1_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + row2_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row1_ = row1_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.row2_ = row2_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance()) return this; + 
if (other.hasRow1()) { + setRow1(other.getRow1()); + } + if (other.hasRow2()) { + setRow2(other.getRow2()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow1()) { + + return false; + } + if (!hasRow2()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row1_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + row2_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row1 = 1; + private com.google.protobuf.ByteString row1_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow1() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow1() { + return row1_; + } + public Builder setRow1(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row1_ = value; + onChanged(); + return this; + } + public Builder clearRow1() { + bitField0_ = (bitField0_ & ~0x00000001); + row1_ = getDefaultInstance().getRow1(); + onChanged(); + return this; + } + + // required bytes row2 = 2; + private com.google.protobuf.ByteString row2_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow2() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getRow2() { + return row2_; + } + public Builder setRow2(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + row2_ = value; + onChanged(); + return this; + } + public Builder clearRow2() { + bitField0_ = (bitField0_ & ~0x00000002); + row2_ = getDefaultInstance().getRow2(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RowSwapProcessorRequest) + } + + static { + defaultInstance = new RowSwapProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowSwapProcessorRequest) + } + + public interface RowSwapProcessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RowSwapProcessorResponse extends + com.google.protobuf.GeneratedMessage + implements RowSwapProcessorResponseOrBuilder { + // Use RowSwapProcessorResponse.newBuilder() to construct. 
+ private RowSwapProcessorResponse(Builder builder) { + super(builder); + } + private RowSwapProcessorResponse(boolean noInit) {} + + private static final RowSwapProcessorResponse defaultInstance; + public static RowSwapProcessorResponse getDefaultInstance() { + return defaultInstance; + } + + public RowSwapProcessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other) { + if (other == 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RowSwapProcessorResponse) + } + + static { + defaultInstance = new RowSwapProcessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RowSwapProcessorResponse) + } + + public interface TimeoutProcessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + } + public static final class TimeoutProcessorRequest extends + com.google.protobuf.GeneratedMessage + implements TimeoutProcessorRequestOrBuilder { + // Use TimeoutProcessorRequest.newBuilder() to construct. + private TimeoutProcessorRequest(Builder builder) { + super(builder); + } + private TimeoutProcessorRequest(boolean noInit) {} + + private static final TimeoutProcessorRequest defaultInstance; + public static TimeoutProcessorRequest getDefaultInstance() { + return defaultInstance; + } + + public TimeoutProcessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = 
memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + 
onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TimeoutProcessorRequest) + } + + static { + defaultInstance = new TimeoutProcessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TimeoutProcessorRequest) + } + + public interface TimeoutProcessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class TimeoutProcessorResponse extends + com.google.protobuf.GeneratedMessage + implements TimeoutProcessorResponseOrBuilder { + // Use TimeoutProcessorResponse.newBuilder() to construct. + private TimeoutProcessorResponse(Builder builder) { + super(builder); + } + private TimeoutProcessorResponse(boolean noInit) {} + + private static final TimeoutProcessorResponse defaultInstance; + public static TimeoutProcessorResponse getDefaultInstance() { + return defaultInstance; + } + + public TimeoutProcessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:TimeoutProcessorResponse)
+    }
+
+    static {
+      defaultInstance = new TimeoutProcessorResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:TimeoutProcessorResponse)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_IncCounterProcessorRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_IncCounterProcessorRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_IncCounterProcessorResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_IncCounterProcessorResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_FriendsOfFriendsProcessorRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_FriendsOfFriendsProcessorResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_RowSwapProcessorRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_RowSwapProcessorRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_RowSwapProcessorResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_RowSwapProcessorResponse_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_TimeoutProcessorRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_TimeoutProcessorRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_TimeoutProcessorResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_TimeoutProcessorResponse_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\037IncrementCounterProcessor.proto\":\n\032Inc" +
+      "CounterProcessorRequest\022\013\n\003row\030\001 \002(\014\022\017\n\007" +
+      "counter\030\002 \002(\005\"/\n\033IncCounterProcessorResp" +
+      "onse\022\020\n\010response\030\001 \002(\005\"O\n FriendsOfFrien" +
+      "dsProcessorRequest\022\016\n\006person\030\001 \002(\014\022\013\n\003ro" +
+      "w\030\002 \002(\014\022\016\n\006result\030\003 \003(\t\"3\n!FriendsOfFrie" +
+      "ndsProcessorResponse\022\016\n\006result\030\001 \003(\t\"5\n\027" +
+      "RowSwapProcessorRequest\022\014\n\004row1\030\001 \002(\014\022\014\n" +
+      "\004row2\030\002 \002(\014\"\032\n\030RowSwapProcessorResponse\"" +
+      "&\n\027TimeoutProcessorRequest\022\013\n\003row\030\001 \002(\014\"",
+      "\032\n\030TimeoutProcessorResponseB`\n6org.apach" +
+      "e.hadoop.hbase.coprocessor.protobuf.gene" +
+      "ratedB#IncrementCounterProcessorTestProt" +
+      "os\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_IncCounterProcessorRequest_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_IncCounterProcessorRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_IncCounterProcessorRequest_descriptor,
+              new java.lang.String[] { "Row", "Counter", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class);
+          internal_static_IncCounterProcessorResponse_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_IncCounterProcessorResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_IncCounterProcessorResponse_descriptor,
+              new java.lang.String[] { "Response", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class);
+          internal_static_FriendsOfFriendsProcessorRequest_descriptor =
+            getDescriptor().getMessageTypes().get(2);
+          internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_FriendsOfFriendsProcessorRequest_descriptor,
+              new java.lang.String[] { "Person", "Row", "Result", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class);
+          internal_static_FriendsOfFriendsProcessorResponse_descriptor =
+            getDescriptor().getMessageTypes().get(3);
+          internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_FriendsOfFriendsProcessorResponse_descriptor,
+              new java.lang.String[] { "Result", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class);
+          internal_static_RowSwapProcessorRequest_descriptor =
+            getDescriptor().getMessageTypes().get(4);
+          internal_static_RowSwapProcessorRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_RowSwapProcessorRequest_descriptor,
+              new java.lang.String[] { "Row1", "Row2", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class);
+          internal_static_RowSwapProcessorResponse_descriptor =
+            getDescriptor().getMessageTypes().get(5);
+          internal_static_RowSwapProcessorResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_RowSwapProcessorResponse_descriptor,
+              new java.lang.String[] { },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class);
+          internal_static_TimeoutProcessorRequest_descriptor =
+            getDescriptor().getMessageTypes().get(6);
+          internal_static_TimeoutProcessorRequest_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_TimeoutProcessorRequest_descriptor,
+              new java.lang.String[] { "Row", },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class);
+          internal_static_TimeoutProcessorResponse_descriptor =
+            getDescriptor().getMessageTypes().get(7);
+          internal_static_TimeoutProcessorResponse_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_TimeoutProcessorResponse_descriptor,
+              new java.lang.String[] { },
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class,
+              org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
diff --git hbase-server/src/test/protobuf/IncrementCounterProcessor.proto hbase-server/src/test/protobuf/IncrementCounterProcessor.proto
new file mode 100644
index 0000000..b8c77ca
--- /dev/null
+++ hbase-server/src/test/protobuf/IncrementCounterProcessor.proto
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "IncrementCounterProcessorTestProtos";
+option java_generate_equals_and_hash = true;
+
+message IncCounterProcessorRequest {
+  required bytes row = 1;
+  required int32 counter = 2;
+}
+
+message IncCounterProcessorResponse {
+  required int32 response = 1;
+}
+
+message FriendsOfFriendsProcessorRequest {
+  required bytes person = 1;
+  required bytes row = 2;
+  repeated string result = 3;
+}
+
+message FriendsOfFriendsProcessorResponse {
+  repeated string result = 1;
+}
+
+message RowSwapProcessorRequest {
+  required bytes row1 = 1;
+  required bytes row2 = 2;
+}
+
+message RowSwapProcessorResponse {
+}
+
+message TimeoutProcessorRequest {
+  required bytes row = 1;
+}
+
+message TimeoutProcessorResponse {
+}
\ No newline at end of file
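
Reviewer note (not part of the patch): a minimal sketch of how the generated test messages above might be exercised from client code, assuming the standard protobuf-java generated API (newBuilder/build/parseFrom). The class name IncCounterProcessorRequestExample and the row key "testrow" are illustrative, not taken from the patch.

import com.google.protobuf.ByteString;

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest;

public class IncCounterProcessorRequestExample {
  public static void main(String[] args) throws Exception {
    // Both fields of IncCounterProcessorRequest are declared required in
    // IncrementCounterProcessor.proto, so build() throws if either is unset.
    IncCounterProcessorRequest request = IncCounterProcessorRequest.newBuilder()
        .setRow(ByteString.copyFromUtf8("testrow"))  // hypothetical row key
        .setCounter(3)
        .build();

    // Round-trip through the wire format, as an endpoint would when the
    // request arrives on the region server side.
    IncCounterProcessorRequest parsed =
        IncCounterProcessorRequest.parseFrom(request.toByteArray());
    System.out.println(parsed.getCounter());  // prints 3
  }
}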