diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
index d6d2a5e..ccc32b5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.ipc.VersionedProtocol;
  * However you still can just implement the interface CoprocessorProtocol
  * and Coprocessor to develop an Endpoint. But you won't be able to access
  * the region related resource, i.e., CoprocessorEnvironment.
+ * @deprecated CoprocessorProtocol is going away in 0.96
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java
new file mode 100644
index 0000000..ca86c51
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java
@@ -0,0 +1,4231 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: PingProtocol.proto
+
+package org.apache.hadoop.hbase.coprocessor.protobuf.generated;
+
+public final class PingProtos {
+  private PingProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface PingRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class PingRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements PingRequestOrBuilder {
+    // Use PingRequest.newBuilder() to construct.
+    private PingRequest(Builder builder) {
+      super(builder);
+    }
+    private PingRequest(boolean noInit) {}
+
+    private static final PingRequest defaultInstance;
+    public static PingRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public PingRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingRequest_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest build() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:PingRequest)
+    }
+
+    static {
+      defaultInstance = new PingRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:PingRequest)
+  }
+
+  public interface PingResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string pong = 1;
+    boolean hasPong();
+    String getPong();
+  }
+  public static final class PingResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements PingResponseOrBuilder {
+    // Use PingResponse.newBuilder() to construct.
+    private PingResponse(Builder builder) {
+      super(builder);
+    }
+    private PingResponse(boolean noInit) {}
+
+    private static final PingResponse defaultInstance;
+    public static PingResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public PingResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // required string pong = 1;
+    public static final int PONG_FIELD_NUMBER = 1;
+    private java.lang.Object pong_;
+    public boolean hasPong() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public String getPong() {
+      java.lang.Object ref = pong_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          pong_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getPongBytes() {
+      java.lang.Object ref = pong_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        pong_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      pong_ = "";
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasPong()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getPongBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getPongBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) obj;
+
+      boolean result = true;
+      result = result && (hasPong() == other.hasPong());
+      if (hasPong()) {
+        result = result && getPong()
+            .equals(other.getPong());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasPong()) {
+        hash = (37 * hash) + PONG_FIELD_NUMBER;
+        hash = (53 * hash) + getPong().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_PingResponse_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        pong_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse build() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.pong_ = pong_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance()) return this;
+        if (other.hasPong()) {
+          setPong(other.getPong());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasPong()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              pong_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // required string pong = 1;
+      private java.lang.Object pong_ = "";
+      public boolean hasPong() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public String getPong() {
+        java.lang.Object ref = pong_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          pong_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setPong(String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        pong_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearPong() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        pong_ = getDefaultInstance().getPong();
+        onChanged();
+        return this;
+      }
+      void setPong(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000001;
+        pong_ = value;
+        onChanged();
+      }
+
+      // @@protoc_insertion_point(builder_scope:PingResponse)
+    }
+
+    static {
+      defaultInstance = new PingResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:PingResponse)
+  }
+
+  public interface CountRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  public static final class CountRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements CountRequestOrBuilder {
+    // Use CountRequest.newBuilder() to construct.
+    private CountRequest(Builder builder) {
+      super(builder);
+    }
+    private CountRequest(boolean noInit) {}
+
+    private static final CountRequest defaultInstance;
+    public static CountRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CountRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable;
+    }
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest) obj;
+
+      boolean result = true;
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountRequest_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest build() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:CountRequest)
+    }
+
+    static {
+      defaultInstance = new CountRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:CountRequest)
+  }
+
+  public interface CountResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 count = 1;
+    boolean hasCount();
+    int getCount();
+  }
+  public static final class CountResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements CountResponseOrBuilder {
+    // Use CountResponse.newBuilder() to construct.
+    private CountResponse(Builder builder) {
+      super(builder);
+    }
+    private CountResponse(boolean noInit) {}
+
+    private static final CountResponse defaultInstance;
+    public static CountResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public CountResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable;
+    }
+
+    private int bitField0_;
+    // required int32 count = 1;
+    public static final int COUNT_FIELD_NUMBER = 1;
+    private int count_;
+    public boolean hasCount() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    public int getCount() {
+      return count_;
+    }
+
+    private void initFields() {
+      count_ = 0;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasCount()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, count_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, count_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) obj;
+
+      boolean result = true;
+      result = result && (hasCount() == other.hasCount());
+      if (hasCount()) {
+        result = result && (getCount()
+            == other.getCount());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasCount()) {
+        hash = (37 * hash) + COUNT_FIELD_NUMBER;
+        hash = (53 * hash) + getCount();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_CountResponse_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        count_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse build() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.count_ = count_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance()) return this;
+        if (other.hasCount()) {
+          setCount(other.getCount());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasCount()) {
+
+          return false;
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              count_ = input.readInt32();
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // required int32 count = 1;
+      private int count_ ;
+      public boolean hasCount() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public int getCount() {
+        return count_;
+      }
+      public Builder setCount(int value) {
+        bitField0_ |= 0x00000001;
+        count_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearCount() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        count_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:CountResponse)
+    }
+
+    static {
+      defaultInstance = new CountResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:CountResponse)
+  }
+
+  public interface IncrementCountRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 diff = 1;
+    boolean hasDiff();
+    int getDiff();
+  }
+  public static final class IncrementCountRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements IncrementCountRequestOrBuilder {
+    // Use IncrementCountRequest.newBuilder() to construct.
+ private IncrementCountRequest(Builder builder) { + super(builder); + } + private IncrementCountRequest(boolean noInit) {} + + private static final IncrementCountRequest defaultInstance; + public static IncrementCountRequest getDefaultInstance() { + return defaultInstance; + } + + public IncrementCountRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable; + } + + private int bitField0_; + // required int32 diff = 1; + public static final int DIFF_FIELD_NUMBER = 1; + private int diff_; + public boolean hasDiff() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getDiff() { + return diff_; + } + + private void initFields() { + diff_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasDiff()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt32(1, diff_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, diff_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest) obj; + + boolean result = true; + result = result && (hasDiff() == other.hasDiff()); + if (hasDiff()) { + result = result && (getDiff() + == other.getDiff()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasDiff()) { + hash = (37 * hash) + DIFF_FIELD_NUMBER; + hash = (53 * hash) + getDiff(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ diff_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest build() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.diff_ = diff_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance()) return this;
+ if (other.hasDiff()) {
+ setDiff(other.getDiff());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasDiff()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ diff_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required int32 diff = 1;
+ private int diff_ ;
+ public boolean hasDiff() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public int getDiff() {
+ return diff_;
+ }
+ public Builder setDiff(int value) {
+ bitField0_ |= 0x00000001;
+ diff_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearDiff() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ diff_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:IncrementCountRequest)
+ }
+
+ static {
+ defaultInstance = new IncrementCountRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:IncrementCountRequest)
+ }
+
+ public interface IncrementCountResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int32 count = 1;
+ boolean hasCount();
+ int getCount();
+ }
+ public static final class IncrementCountResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements IncrementCountResponseOrBuilder {
+ // Use IncrementCountResponse.newBuilder() to construct.
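// [Editorial aside; illustrative only, not part of the generated file or of
// this patch.] Both IncrementCountRequest.diff and IncrementCountResponse.count
// are declared `required`, so isInitialized() returns false and build() throws
// UninitializedMessageException until the field is set. A minimal sketch,
// assuming only the generated API shown above:
//   PingProtos.IncrementCountRequest req =
//       PingProtos.IncrementCountRequest.newBuilder().setDiff(1).build();
//   // PingProtos.IncrementCountRequest.newBuilder().build() would throw here.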
+ private IncrementCountResponse(Builder builder) {
+ super(builder);
+ }
+ private IncrementCountResponse(boolean noInit) {}
+
+ private static final IncrementCountResponse defaultInstance;
+ public static IncrementCountResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public IncrementCountResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required int32 count = 1;
+ public static final int COUNT_FIELD_NUMBER = 1;
+ private int count_;
+ public boolean hasCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public int getCount() {
+ return count_;
+ }
+
+ private void initFields() {
+ count_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasCount()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt32(1, count_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, count_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) obj;
+
+ boolean result = true;
+ result = result && (hasCount() == other.hasCount());
+ if (hasCount()) {
+ result = result && (getCount()
+ == other.getCount());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasCount()) {
+ hash = (37 * hash) + COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getCount();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_IncrementCountResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ count_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse build() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.count_ = count_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance()) return this;
+ if (other.hasCount()) {
+ setCount(other.getCount());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasCount()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ count_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required int32 count = 1;
+ private int count_ ;
+ public boolean hasCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public int getCount() {
+ return count_;
+ }
+ public Builder setCount(int value) {
+ bitField0_ |= 0x00000001;
+ count_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearCount() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ count_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:IncrementCountResponse)
+ }
+
+ static {
+ defaultInstance = new IncrementCountResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:IncrementCountResponse)
+ }
+
+ public interface HelloRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string name = 1;
+ boolean hasName();
+ String getName();
+ }
+ public static final class HelloRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements HelloRequestOrBuilder {
+ // Use HelloRequest.newBuilder() to construct.
+ private HelloRequest(Builder builder) {
+ super(builder);
+ }
+ private HelloRequest(boolean noInit) {}
+
+ private static final HelloRequest defaultInstance;
+ public static HelloRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HelloRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // optional string name = 1;
+ public static final int NAME_FIELD_NUMBER = 1;
+ private java.lang.Object name_;
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ name_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ name_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest) obj;
+
+ boolean result = true;
+ result = result && (hasName() == other.hasName());
+ if (hasName()) {
+ result = result && getName()
+ .equals(other.getName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasName()) {
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloRequest_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest build() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.name_ = name_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+
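// [Editorial aside; illustrative only, not part of the generated file or of
// this patch.] The type-specific mergeFrom below follows the usual protobuf
// builder contract: it copies only the fields set on `other` (here the
// optional `name`) and then merges `other`'s unknown fields, so merging a
// default instance is a no-op. A minimal sketch, assuming the generated API
// in this file:
//   PingProtos.HelloRequest a = PingProtos.HelloRequest.newBuilder().setName("x").build();
//   PingProtos.HelloRequest b = PingProtos.HelloRequest.newBuilder().mergeFrom(a).build();
//   // b.getName().equals("x")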
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance()) return this;
+ if (other.hasName()) {
+ setName(other.getName());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // optional string name = 1;
+ private java.lang.Object name_ = "";
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ void setName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:HelloRequest)
+ }
+
+ static {
+ defaultInstance = new HelloRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HelloRequest)
+ }
+
+ public interface HelloResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string response = 1;
+ boolean hasResponse();
+ String getResponse();
+ }
+ public static final class HelloResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements HelloResponseOrBuilder {
+ // Use HelloResponse.newBuilder() to construct.
+ private HelloResponse(Builder builder) {
+ super(builder);
+ }
+ private HelloResponse(boolean noInit) {}
+
+ private static final HelloResponse defaultInstance;
+ public static HelloResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HelloResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // optional string response = 1;
+ public static final int RESPONSE_FIELD_NUMBER = 1;
+ private java.lang.Object response_;
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getResponse() {
+ java.lang.Object ref = response_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ response_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getResponseBytes() {
+ java.lang.Object ref = response_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ response_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ response_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getResponseBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getResponseBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) obj;
+
+ boolean result = true;
+ result = result && (hasResponse() == other.hasResponse());
+ if (hasResponse()) {
+ result = result && getResponse()
+ .equals(other.getResponse());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasResponse()) {
+ hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
+ hash = (53 * hash) + getResponse().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_HelloResponse_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ response_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDescriptor();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse build() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.response_ = response_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance()) return this;
+ if (other.hasResponse()) {
+ setResponse(other.getResponse());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ response_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // optional string response = 1;
+ private java.lang.Object response_ = "";
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getResponse() {
+ java.lang.Object ref = response_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ response_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setResponse(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ response_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearResponse() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ response_ = getDefaultInstance().getResponse();
+ onChanged();
+ return this;
+ }
+ void setResponse(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ response_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:HelloResponse)
+ }
+
+ static {
+ defaultInstance = new HelloResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HelloResponse)
+ }
+
+ public interface NoopRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class NoopRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements NoopRequestOrBuilder {
+ // Use NoopRequest.newBuilder() to construct.
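// [Editorial aside; illustrative only, not part of the generated file or of
// this patch.] NoopRequest and NoopResponse carry no fields: they serialize
// to zero bytes (plus any unknown fields) and serve as void-like parameters
// for the service methods later in this file. A minimal sketch, assuming the
// generated API here:
//   byte[] wire = PingProtos.NoopRequest.getDefaultInstance().toByteArray();
//   // wire.length == 0
//   PingProtos.NoopRequest again = PingProtos.NoopRequest.parseFrom(wire);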
+ private NoopRequest(Builder builder) { + super(builder); + } + private NoopRequest(boolean noInit) {} + + private static final NoopRequest defaultInstance; + public static NoopRequest getDefaultInstance() { + return defaultInstance; + } + + public NoopRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return 
this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest build() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:NoopRequest) + } + + static { + defaultInstance = new NoopRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:NoopRequest) + } + + public interface NoopResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class NoopResponse extends + com.google.protobuf.GeneratedMessage + implements NoopResponseOrBuilder { + // Use NoopResponse.newBuilder() to construct. 
+ private NoopResponse(Builder builder) { + super(builder); + } + private NoopResponse(boolean noInit) {} + + private static final NoopResponse defaultInstance; + public static NoopResponse getDefaultInstance() { + return defaultInstance; + } + + public NoopResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, 
extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.internal_static_NoopResponse_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDescriptor();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse build() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse buildPartial() {
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) {
+          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse other) {
+        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+          }
+        }
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:NoopResponse)
+    }
+
+    static {
+      defaultInstance = new NoopResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:NoopResponse)
+  }
+
+  public static abstract class PingService
+      implements com.google.protobuf.Service {
+    protected PingService() {}
+
+    public interface Interface {
+      public abstract void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse> done);
+
+      public abstract void count(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse> done);
+
+      public abstract void increment(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse> done);
+
+      public abstract void hello(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse> done);
+
+      public abstract void noop(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse> done);
+
+    }
+
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new PingService() {
+        @java.lang.Override
+        public void ping(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse> done) {
+          impl.ping(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void count(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse> done) {
+          impl.count(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void increment(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse> done) {
+          impl.increment(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void hello(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse> done) {
+          impl.hello(controller, request, done);
+        }
+
+        @java.lang.Override
+        public void noop(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse> done) {
+          impl.noop(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.ping(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest)request);
+            case 1:
+              return impl.count(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest)request);
+            case 2:
+              return impl.increment(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest)request);
+            case 3:
+              return impl.hello(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest)request);
+            case 4:
+              return impl.noop(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance();
+            case 1:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance();
+            case 2:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance();
+            case 3:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance();
+            case 4:
+              return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    public abstract void ping(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse> done);
+
+    public abstract void count(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse> done);
+
+    public abstract void increment(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse> done);
+
+    public abstract void hello(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse> done);
+
+    public abstract void noop(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.ping(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.count(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.increment(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.hello(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.noop(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance(); + case 3: + return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance();
+        case 4:
+          return org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public void ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance()));
+      }
+
+      public void count(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance()));
+      }
+
+      public void increment(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(2),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance()));
+      }
+
+      public void hello(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(3),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance()));
+      }
+
+      public void noop(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(4),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.class,
+            org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse count(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse increment(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse hello(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request)
+          throws com.google.protobuf.ServiceException;
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse noop(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse ping(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.getDefaultInstance());
+      }
+
+
+      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse count(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(1),
+          controller,
+          request,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse increment( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse hello( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse noop( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PingRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PingRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PingResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PingResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CountRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CountRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CountResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CountResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IncrementCountRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IncrementCountRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_IncrementCountResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_IncrementCountResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_HelloRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HelloRequest_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_HelloResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_HelloResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NoopRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NoopRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_NoopResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_NoopResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022PingProtocol.proto\"\r\n\013PingRequest\"\034\n\014P" + + "ingResponse\022\014\n\004pong\030\001 \002(\t\"\016\n\014CountReques" + + "t\"\036\n\rCountResponse\022\r\n\005count\030\001 \002(\005\"%\n\025Inc" + + "rementCountRequest\022\014\n\004diff\030\001 \002(\005\"\'\n\026Incr" + + "ementCountResponse\022\r\n\005count\030\001 \002(\005\"\034\n\014Hel" + + "loRequest\022\014\n\004name\030\001 \001(\t\"!\n\rHelloResponse" + + "\022\020\n\010response\030\001 \001(\t\"\r\n\013NoopRequest\"\016\n\014Noo" + + "pResponse2\345\001\n\013PingService\022#\n\004ping\022\014.Ping" + + "Request\032\r.PingResponse\022&\n\005count\022\r.CountR" + + "equest\032\016.CountResponse\022<\n\tincrement\022\026.In", + "crementCountRequest\032\027.IncrementCountResp" + + "onse\022&\n\005hello\022\r.HelloRequest\032\016.HelloResp" + + "onse\022#\n\004noop\022\014.NoopRequest\032\r.NoopRespons" + + "eBJ\n6org.apache.hadoop.hbase.coprocessor" + + ".protobuf.generatedB\nPingProtos\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_PingRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_PingRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PingRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest.Builder.class); + internal_static_PingResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_PingResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PingResponse_descriptor, + new java.lang.String[] { "Pong", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse.Builder.class); + internal_static_CountRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_CountRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CountRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.class, + 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest.Builder.class); + internal_static_CountResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_CountResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CountResponse_descriptor, + new java.lang.String[] { "Count", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse.Builder.class); + internal_static_IncrementCountRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_IncrementCountRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IncrementCountRequest_descriptor, + new java.lang.String[] { "Diff", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest.Builder.class); + internal_static_IncrementCountResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_IncrementCountResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_IncrementCountResponse_descriptor, + new java.lang.String[] { "Count", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse.Builder.class); + internal_static_HelloRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_HelloRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HelloRequest_descriptor, + new java.lang.String[] { "Name", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest.Builder.class); + internal_static_HelloResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_HelloResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_HelloResponse_descriptor, + new java.lang.String[] { "Response", }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse.Builder.class); + internal_static_NoopRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_NoopRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NoopRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest.Builder.class); + internal_static_NoopResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_NoopResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_NoopResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.class, + org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + 
.internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java index 5ff1e49..fb895b8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java @@ -18,96 +18,119 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; import java.util.Map; - -import org.apache.hadoop.hbase.*; +import java.util.Map.Entry; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Row; import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; -import org.apache.hadoop.hbase.ipc.ProtocolSignature; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.CountResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.HelloResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.IncrementCountResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopResponse; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest; +import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.JVMClusterUtil; -import org.apache.hadoop.hbase.ipc.VersionedProtocol; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; -import org.junit.experimental.categories.Category; +import 
com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
 
 @Category(MediumTests.class)
 public class TestServerCustomProtocol {
-  /* Test protocol */
-  public static interface PingProtocol extends CoprocessorProtocol {
-    public String ping();
-    public int getPingCount();
-    public int incrementCount(int diff);
-    public String hello(String name);
-    public void noop();
-  }
+  private static final Log LOG = LogFactory.getLog(TestServerCustomProtocol.class);
+  static final String WHOAREYOU = "Who are you?";
+  static final String NOBODY = "nobody";
+  static final String HELLO = "Hello, ";
 
   /* Test protocol implementation */
-  public static class PingHandler implements Coprocessor, PingProtocol, VersionedProtocol {
-    static long VERSION = 1;
+  public static class PingHandler extends PingProtos.PingService
+  implements Coprocessor, CoprocessorService {
     private int counter = 0;
-    @Override
-    public String ping() {
-      counter++;
-      return "pong";
-    }
 
     @Override
-    public int getPingCount() {
-      return counter;
+    public void start(CoprocessorEnvironment env) throws IOException {
+      if (env instanceof RegionCoprocessorEnvironment) return;
+      throw new CoprocessorException("Must be loaded on a table region!");
     }
 
     @Override
-    public int incrementCount(int diff) {
-      counter += diff;
-      return counter;
+    public void stop(CoprocessorEnvironment env) throws IOException {
+      // Nothing to do.
     }
 
     @Override
-    public String hello(String name) {
-      if (name == null) {
-        return "Who are you?";
-      } else if ("nobody".equals(name)) {
-        return null;
-      }
-      return "Hello, "+name;
+    public void ping(RpcController controller, PingRequest request,
+        RpcCallback<PingResponse> done) {
+      this.counter++;
+      done.run(PingResponse.newBuilder().setPong("pong").build());
     }
 
     @Override
-    public void noop() {
-      // do nothing, just test void return type
+    public void count(RpcController controller, CountRequest request,
+        RpcCallback<CountResponse> done) {
+      done.run(CountResponse.newBuilder().setCount(this.counter).build());
     }
 
     @Override
-    public ProtocolSignature getProtocolSignature(
-        String protocol, long version, int clientMethodsHashCode)
-    throws IOException {
-      return new ProtocolSignature(VERSION, null);
+    public void increment(RpcController controller,
+        IncrementCountRequest request, RpcCallback<IncrementCountResponse> done) {
+      this.counter += request.getDiff();
+      done.run(IncrementCountResponse.newBuilder().setCount(this.counter).build());
     }
 
     @Override
-    public long getProtocolVersion(String s, long l) throws IOException {
-      return VERSION;
+    public void hello(RpcController controller, HelloRequest request,
+        RpcCallback<HelloResponse> done) {
+      if (!request.hasName()) done.run(HelloResponse.newBuilder().setResponse(WHOAREYOU).build());
+      else if (request.getName().equals(NOBODY)) done.run(HelloResponse.newBuilder().build());
+      else done.run(HelloResponse.newBuilder().setResponse(HELLO + request.getName()).build());
     }
 
     @Override
-    public void start(CoprocessorEnvironment env) throws IOException {
+    public void noop(RpcController controller, NoopRequest request,
+        RpcCallback<NoopResponse> done) {
+      done.run(NoopResponse.newBuilder().build());
     }
 
     @Override
-    public void stop(CoprocessorEnvironment env) throws IOException {
+    public Service getService() {
+      return this;
     }
   }
@@ -127,14 +150,13 @@ public class TestServerCustomProtocol {
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
     util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
-        PingHandler.class.getName());
-    util.startMiniCluster(1);
+      PingHandler.class.getName());
+    util.startMiniCluster();
     cluster = util.getMiniHBaseCluster();
     HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
     util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY,
-      new byte[][]{ HConstants.EMPTY_BYTE_ARRAY,
-        ROW_B, ROW_C});
+      new byte[][]{ HConstants.EMPTY_BYTE_ARRAY, ROW_B, ROW_C});
 
     Put puta = new Put( ROW_A );
     puta.add(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
@@ -155,127 +177,185 @@
   }
 
   @Test
-  public void testSingleProxy() throws Exception {
+  public void testSingleProxy() throws Throwable {
     HTable table = new HTable(util.getConfiguration(), TEST_TABLE);
+    Map<byte [], String> results = ping(table, null, null);
+    // There are three regions so should get back three results.
+    assertEquals(3, results.size());
+    for (Map.Entry<byte [], String> e: results.entrySet()) {
+      assertEquals("Invalid custom protocol response", "pong", e.getValue());
+    }
+    hello(table, "George", HELLO + "George");
+    LOG.info("Did george");
+    hello(table, null, "Who are you?");
+    LOG.info("Who are you");
+    hello(table, NOBODY, null);
+    LOG.info(NOBODY);
+    Map<byte [], Integer> intResults = table.coprocessorService(PingProtos.PingService.class,
+      null, null,
+      new Batch.Call<PingProtos.PingService, Integer>() {
+        @Override
+        public Integer call(PingProtos.PingService instance) throws IOException {
+          BlockingRpcCallback<PingProtos.CountResponse> rpcCallback =
+            new BlockingRpcCallback<PingProtos.CountResponse>();
+          instance.count(null, PingProtos.CountRequest.newBuilder().build(), rpcCallback);
+          return rpcCallback.get().getCount();
+        }
+      });
+    int count = -1;
+    for (Map.Entry<byte [], Integer> e: intResults.entrySet()) {
+      assertTrue(e.getValue() > 0);
+      count = e.getValue();
+    }
+    final int diff = 5;
+    intResults = table.coprocessorService(PingProtos.PingService.class,
+      null, null,
+      new Batch.Call<PingProtos.PingService, Integer>() {
+        @Override
+        public Integer call(PingProtos.PingService instance) throws IOException {
+          BlockingRpcCallback<PingProtos.IncrementCountResponse> rpcCallback =
+            new BlockingRpcCallback<PingProtos.IncrementCountResponse>();
+          instance.increment(null, PingProtos.IncrementCountRequest.newBuilder().setDiff(diff).build(),
+            rpcCallback);
+          return rpcCallback.get().getCount();
+        }
+      });
+    // There are three regions so should get back three results.
+    assertEquals(3, intResults.size());
+    for (Map.Entry<byte [], Integer> e: intResults.entrySet()) {
+      assertTrue(e.getValue() == count + diff);
+    }
+    table.close();
+  }
-    PingProtocol pinger = table.coprocessorProxy(PingProtocol.class, ROW_A);
-    String result = pinger.ping();
-    assertEquals("Invalid custom protocol response", "pong", result);
-    result = pinger.hello("George");
-    assertEquals("Invalid custom protocol response", "Hello, George", result);
-    result = pinger.hello(null);
-    assertEquals("Should handle NULL parameter", "Who are you?", result);
-    result = pinger.hello("nobody");
-    assertNull(result);
-    int cnt = pinger.getPingCount();
-    assertTrue("Count should be incremented", cnt > 0);
-    int newcnt = pinger.incrementCount(5);
-    assertEquals("Counter should have incremented by 5", cnt+5, newcnt);
+  private Map<byte [], String> hello(final HTable table, final String send, final String response)
+      throws ServiceException, Throwable {
+    Map<byte [], String> results = hello(table, send);
+    for (Map.Entry<byte [], String> e: results.entrySet()) {
+      assertEquals("Invalid custom protocol response", response, e.getValue());
+    }
+    return results;
+  }
+
+  private Map<byte [], String> hello(final HTable table, final String send)
+      throws ServiceException, Throwable {
+    return hello(table, send, null, null);
+  }
+
+  private Map<byte [], String> hello(final HTable table, final String send, final byte [] start,
+      final byte [] end)
+      throws ServiceException, Throwable {
+    return table.coprocessorService(PingProtos.PingService.class,
+      start, end,
+      new Batch.Call<PingProtos.PingService, String>() {
+        @Override
+        public String call(PingProtos.PingService instance) throws IOException {
+          BlockingRpcCallback<PingProtos.HelloResponse> rpcCallback =
+            new BlockingRpcCallback<PingProtos.HelloResponse>();
+          PingProtos.HelloRequest.Builder builder = PingProtos.HelloRequest.newBuilder();
+          if (send != null) builder.setName(send);
+          instance.hello(null, builder.build(), rpcCallback);
+          PingProtos.HelloResponse response = rpcCallback.get();
+          return response != null && rpcCallback.get().hasResponse()?
+            rpcCallback.get().getResponse(): null;
+        }
+      });
   }
 
   @Test
   public void testSingleMethod() throws Throwable {
     HTable table = new HTable(util.getConfiguration(), TEST_TABLE);
-
-    List<Row> rows = Lists.newArrayList(
-      new Get(ROW_A), new Get(ROW_B), new Get(ROW_C));
-
-    Batch.Call<PingProtocol, String> call = Batch.forMethod(PingProtocol.class,
-      "ping");
-    Map<byte[], String> results =
-      table.coprocessorExec(PingProtocol.class, ROW_A, ROW_C, call);
-
-
+    Map<byte [], String> results = table.coprocessorService(PingProtos.PingService.class,
+      null, ROW_A,
+      new Batch.Call<PingProtos.PingService, String>() {
+        @Override
+        public String call(PingProtos.PingService instance) throws IOException {
+          BlockingRpcCallback<PingProtos.PingResponse> rpcCallback =
+            new BlockingRpcCallback<PingProtos.PingResponse>();
+          instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
+          return rpcCallback.get().getPong();
+        }
+      });
+    // Should have gotten results for 1 of the three regions only since we specified
+    // rows from 1 region
+    assertEquals(1, results.size());
     verifyRegionResults(table, results, ROW_A);
-    verifyRegionResults(table, results, ROW_B);
-    verifyRegionResults(table, results, ROW_C);
-
-    Batch.Call<PingProtocol, String> helloCall =
-      Batch.forMethod(PingProtocol.class, "hello", "NAME");
-    results =
-      table.coprocessorExec(PingProtocol.class, ROW_A, ROW_C, helloCall);
-
+    final String name = "NAME";
+    results = hello(table, name, null, ROW_A);
+    // Should have gotten results for 1 of the three regions only since we specified
+    // rows from 1 region
+    assertEquals(1, results.size());
     verifyRegionResults(table, results, "Hello, NAME", ROW_A);
-    verifyRegionResults(table, results, "Hello, NAME", ROW_B);
-    verifyRegionResults(table, results, "Hello, NAME", ROW_C);
+    table.close();
   }
 
   @Test
   public void testRowRange() throws Throwable {
     HTable table = new HTable(util.getConfiguration(), TEST_TABLE);
-
-    // test empty range
-    Map<byte [], String> results = table.coprocessorExec(PingProtocol.class,
-      null, null, new Batch.Call<PingProtocol, String>() {
-        public String call(PingProtocol instance) {
-          return instance.ping();
-        }
-      });
-    // should contain all three rows/regions
+    for (Entry<HRegionInfo, ServerName> e: table.getRegionLocations().entrySet()) {
+      LOG.info("Region " + e.getKey().getRegionNameAsString() + ", servername=" + e.getValue());
+    }
+    Map<byte [], String> results = ping(table, null, null);
+    // Should contain all three rows/regions
    verifyRegionResults(table, results, ROW_A);
    verifyRegionResults(table, results, ROW_B);
    verifyRegionResults(table, results, ROW_C);
 
-    // test start row + empty end
-    results = table.coprocessorExec(PingProtocol.class, ROW_BC, null,
-      new Batch.Call<PingProtocol, String>() {
-        public String call(PingProtocol instance) {
-          return instance.ping();
-        }
-      });
+    // Test start row + empty end
+    results = ping(table, ROW_BC, null);
     // should contain last 2 regions
-    HRegionLocation loc = table.getRegionLocation(ROW_A);
+    HRegionLocation loc = table.getRegionLocation(ROW_A, true);
     assertNull("Should be missing region for row aaa (prior to start row)",
       results.get(loc.getRegionInfo().getRegionName()));
     verifyRegionResults(table, results, ROW_B);
     verifyRegionResults(table, results, ROW_C);
 
     // test empty start + end
-    results = table.coprocessorExec(PingProtocol.class, null, ROW_BC,
-      new Batch.Call<PingProtocol, String>() {
-        public String call(PingProtocol instance) {
-          return instance.ping();
-        }
-      });
+    results = ping(table, null, ROW_BC);
     // should contain the first 2 regions
     verifyRegionResults(table, results, ROW_A);
     verifyRegionResults(table, results, ROW_B);
-    loc = table.getRegionLocation(ROW_C);
+    loc = table.getRegionLocation(ROW_C, true);
     assertNull("Should be missing region for row ccc (past stop row)",
       results.get(loc.getRegionInfo().getRegionName()));
 
     // test explicit start + end
-    results = table.coprocessorExec(PingProtocol.class, ROW_AB, ROW_BC,
-      new Batch.Call<PingProtocol, String>() {
-        public String call(PingProtocol instance) {
-          return instance.ping();
-        }
-      });
+    results = ping(table, ROW_AB, ROW_BC);
     // should contain first 2 regions
     verifyRegionResults(table, results, ROW_A);
     verifyRegionResults(table, results, ROW_B);
-    loc = table.getRegionLocation(ROW_C);
+    loc = table.getRegionLocation(ROW_C, true);
     assertNull("Should be missing region for row ccc (past stop row)",
       results.get(loc.getRegionInfo().getRegionName()));
 
     // test single region
-    results = table.coprocessorExec(PingProtocol.class, ROW_B, ROW_BC,
-      new Batch.Call<PingProtocol, String>() {
-        public String call(PingProtocol instance) {
-          return instance.ping();
-        }
-      });
+    results = ping(table, ROW_B, ROW_BC);
     // should only contain region bbb
     verifyRegionResults(table, results, ROW_B);
-    loc = table.getRegionLocation(ROW_A);
+    loc = table.getRegionLocation(ROW_A, true);
     assertNull("Should be missing region for row aaa (prior to start)",
       results.get(loc.getRegionInfo().getRegionName()));
-    loc = table.getRegionLocation(ROW_C);
+    loc = table.getRegionLocation(ROW_C, true);
     assertNull("Should be missing region for row ccc (past stop row)",
       results.get(loc.getRegionInfo().getRegionName()));
+    table.close();
   }
 
+  private Map<byte [], String> ping(final HTable table, final byte [] start, final byte [] end)
+      throws ServiceException, Throwable {
+    return table.coprocessorService(PingProtos.PingService.class, start, end,
+      new Batch.Call<PingProtos.PingService, String>() {
+        @Override
+        public String call(PingProtos.PingService instance) throws IOException {
+          BlockingRpcCallback<PingProtos.PingResponse> rpcCallback =
+            new BlockingRpcCallback<PingProtos.PingResponse>();
+          instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
+          return rpcCallback.get().getPong();
+        }
+      });
+  }
+
+/*
   @Test
   public void testCompountCall() throws Throwable {
     HTable table = new HTable(util.getConfiguration(), TEST_TABLE);
@@ -346,23 +426,26 @@ public class TestServerCustomProtocol {
       assertNull(v);
     }
   }
-
+*/
   private void verifyRegionResults(HTable table,
       Map<byte [], String> results, byte[] row) throws Exception {
     verifyRegionResults(table, results, "pong", row);
   }
 
   private void verifyRegionResults(HTable table,
-      Map<byte [], String> results, String expected, byte[] row)
+      Map<byte [], String> results, String expected, byte[] row)
   throws Exception {
-    HRegionLocation loc = table.getRegionLocation(row);
+    for (Map.Entry<byte [], String> e: results.entrySet()) {
+      LOG.info("row=" + Bytes.toString(row) + ", expected=" + expected +
+        ", result key=" + Bytes.toString(e.getKey()) +
+        ", value=" + e.getValue());
+    }
+    HRegionLocation loc = table.getRegionLocation(row, true);
     byte[] region = loc.getRegionInfo().getRegionName();
     assertTrue("Results should contain region " +
-      Bytes.toStringBinary(region)+" for row '"+Bytes.toStringBinary(row)+"'",
-      results.containsKey(region));
+      Bytes.toStringBinary(region) + " for row '" + Bytes.toStringBinary(row)+ "'",
+      results.containsKey(region));
     assertEquals("Invalid result for row '"+Bytes.toStringBinary(row)+"'",
-      expected, results.get(region));
+      expected, results.get(region));
   }
-
-}
-
+}
\ No newline at end of file
diff --git a/hbase-server/src/test/protobuf/PingProtocol.proto b/hbase-server/src/test/protobuf/PingProtocol.proto
new file mode 100644
index 0000000..ef63ee0
--- /dev/null
+++ b/hbase-server/src/test/protobuf/PingProtocol.proto
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Coprocessor test
+option java_package = "org.apache.hadoop.hbase.coprocessor.protobuf.generated";
+option java_outer_classname = "PingProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+message PingRequest {
+}
+
+message PingResponse {
+  required string pong = 1;
+}
+
+message CountRequest {
+}
+
+message CountResponse {
+  required int32 count = 1;
+}
+
+message IncrementCountRequest {
+  required int32 diff = 1;
+}
+
+message IncrementCountResponse {
+  required int32 count = 1;
+}
+
+message HelloRequest {
+  optional string name = 1;
+}
+
+message HelloResponse {
+  optional string response = 1;
+}
+
+message NoopRequest {
+}
+
+message NoopResponse {
+}
+
+service PingService {
+  rpc ping(PingRequest) returns(PingResponse);
+  rpc count(CountRequest) returns(CountResponse);
+  rpc increment(IncrementCountRequest) returns(IncrementCountResponse);
+  rpc hello(HelloRequest) returns(HelloResponse);
+  rpc noop(NoopRequest) returns(NoopResponse);
+}
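
For reference, a client would exercise the Endpoint added by this patch roughly as follows. This is only a sketch and not part of the patch: the standalone class and the table name "t" are assumptions, and it simply reuses the coprocessorService()/Batch.Call/BlockingRpcCallback calling convention that TestServerCustomProtocol demonstrates above. Note also that PingProtos.java is protoc output (marked DO NOT EDIT); any change belongs in PingProtocol.proto, from which the Java would be regenerated (the exact protoc invocation depends on the build setup).

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;

public class PingClientSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical table that has PingHandler loaded as a region coprocessor.
    HTable table = new HTable(conf, "t");
    try {
      // Null start/end rows fan the call out to every region of the table.
      Map<byte[], String> greetings = table.coprocessorService(
        PingProtos.PingService.class, null, null,
        new Batch.Call<PingProtos.PingService, String>() {
          @Override
          public String call(PingProtos.PingService instance) throws IOException {
            // BlockingRpcCallback turns the callback-style generated stub
            // into a blocking call, as in the test above.
            BlockingRpcCallback<PingProtos.HelloResponse> callback =
              new BlockingRpcCallback<PingProtos.HelloResponse>();
            instance.hello(null,
              PingProtos.HelloRequest.newBuilder().setName("world").build(),
              callback);
            PingProtos.HelloResponse response = callback.get();
            return response != null && response.hasResponse()
              ? response.getResponse() : null;
          }
        });
      // One entry per region, each mapping the region name to "Hello, world".
      for (Map.Entry<byte[], String> entry : greetings.entrySet()) {
        System.out.println(entry.getValue());
      }
    } finally {
      table.close();
    }
  }
}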