diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index d20c7c8..1b94993 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -503,6 +503,15 @@ public class TestClientNoCluster extends Configured implements Tool {
}
@Override
+ public ClientProtos.PingResponse ping(RpcController controller,
+ ClientProtos.PingRequest request)
+ throws ServiceException {
+ ClientProtos.PingResponse.Builder pingResponse = ClientProtos.PingResponse.newBuilder();
+ pingResponse.setProcessed(true);
+ return pingResponse.build();
+ }
+
+ @Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
CoprocessorServiceRequest request) throws ServiceException {
throw new NotImplementedException();
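
For context, a caller exercises the new RPC through the generated blocking stub. The sketch below is illustrative only; how the stub is obtained is an assumption, not part of this patch:

    // Hypothetical caller-side sketch: ping a server through the generated
    // blocking stub added by this patch. A null RpcController is acceptable
    // for the generated blocking stubs.
    static boolean pingServer(ClientProtos.ClientService.BlockingInterface stub)
        throws ServiceException {
      ClientProtos.PingRequest request =
          ClientProtos.PingRequest.newBuilder().setProcessed(true).build();
      ClientProtos.PingResponse response = stub.ping(null, request);
      return response.getProcessed();
    }
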
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
index 47ab440..1d12a94 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
@@ -40113,6 +40113,940 @@ public final class ClientProtos {
}
+ public interface PingRequestOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.PingRequest)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * optional bool processed = 1;
+ */
+ boolean hasProcessed();
+ /**
+ * optional bool processed = 1;
+ */
+ boolean getProcessed();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingRequest}
+ */
+ public static final class PingRequest extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:hbase.pb.PingRequest)
+ PingRequestOrBuilder {
+ // Use PingRequest.newBuilder() to construct.
+ private PingRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private PingRequest() {
+ processed_ = false;
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private PingRequest(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ int mutable_bitField0_ = 0;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ processed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.Builder.class);
+ }
+
+ private int bitField0_;
+ public static final int PROCESSED_FIELD_NUMBER = 1;
+ private boolean processed_;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, processed_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, processed_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest) obj;
+
+ boolean result = true;
+ result = result && (hasProcessed() == other.hasProcessed());
+ if (hasProcessed()) {
+ result = result && (getProcessed()
+ == other.getProcessed());
+ }
+ result = result && unknownFields.equals(other.unknownFields);
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasProcessed()) {
+ hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
+ hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
+ getProcessed());
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(byte[] data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ byte[] data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingRequest}
+ */
+ public static final class Builder extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:hbase.pb.PingRequest)
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequestOrBuilder {
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
+ }
+ }
+ public Builder clear() {
+ super.clear();
+ processed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.processed_ = processed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder clone() {
+ return (Builder) super.clone();
+ }
+ public Builder setField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.setField(field, value);
+ }
+ public Builder clearField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return (Builder) super.clearField(field);
+ }
+ public Builder clearOneof(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return (Builder) super.clearOneof(oneof);
+ }
+ public Builder setRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, Object value) {
+ return (Builder) super.setRepeatedField(field, index, value);
+ }
+ public Builder addRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.addRepeatedField(field, value);
+ }
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance()) return this;
+ if (other.hasProcessed()) {
+ setProcessed(other.getProcessed());
+ }
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest) e.getUnfinishedMessage();
+ throw e.unwrapIOException();
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ private boolean processed_ ;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder setProcessed(boolean value) {
+ bitField0_ |= 0x00000001;
+ processed_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder clearProcessed() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ processed_ = false;
+ onChanged();
+ return this;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.PingRequest)
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.PingRequest)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingRequest>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<PingRequest>() {
+ public PingRequest parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new PingRequest(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingRequest> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingRequest> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
+
+ public interface PingResponseOrBuilder extends
+ // @@protoc_insertion_point(interface_extends:hbase.pb.PingResponse)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
+
+ /**
+ * optional bool processed = 1;
+ */
+ boolean hasProcessed();
+ /**
+ * optional bool processed = 1;
+ */
+ boolean getProcessed();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingResponse}
+ */
+ public static final class PingResponse extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:hbase.pb.PingResponse)
+ PingResponseOrBuilder {
+ // Use PingResponse.newBuilder() to construct.
+ private PingResponse(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private PingResponse() {
+ processed_ = false;
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private PingResponse(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ int mutable_bitField0_ = 0;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ processed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.Builder.class);
+ }
+
+ private int bitField0_;
+ public static final int PROCESSED_FIELD_NUMBER = 1;
+ private boolean processed_;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, processed_);
+ }
+ unknownFields.writeTo(output);
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, processed_);
+ }
+ size += unknownFields.getSerializedSize();
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse) obj;
+
+ boolean result = true;
+ result = result && (hasProcessed() == other.hasProcessed());
+ if (hasProcessed()) {
+ result = result && (getProcessed()
+ == other.getProcessed());
+ }
+ result = result && unknownFields.equals(other.unknownFields);
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasProcessed()) {
+ hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
+ hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
+ getProcessed());
+ }
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(byte[] data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ byte[] data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingResponse}
+ */
+ public static final class Builder extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:hbase.pb.PingResponse)
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponseOrBuilder {
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .alwaysUseFieldBuilders) {
+ }
+ }
+ public Builder clear() {
+ super.clear();
+ processed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse build() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse buildPartial() {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.processed_ = processed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder clone() {
+ return (Builder) super.clone();
+ }
+ public Builder setField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.setField(field, value);
+ }
+ public Builder clearField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
+ return (Builder) super.clearField(field);
+ }
+ public Builder clearOneof(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ return (Builder) super.clearOneof(oneof);
+ }
+ public Builder setRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ int index, Object value) {
+ return (Builder) super.setRepeatedField(field, index, value);
+ }
+ public Builder addRepeatedField(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
+ Object value) {
+ return (Builder) super.addRepeatedField(field, value);
+ }
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse other) {
+ if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance()) return this;
+ if (other.hasProcessed()) {
+ setProcessed(other.getProcessed());
+ }
+ this.mergeUnknownFields(other.unknownFields);
+ onChanged();
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse) e.getUnfinishedMessage();
+ throw e.unwrapIOException();
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ private boolean processed_ ;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder setProcessed(boolean value) {
+ bitField0_ |= 0x00000001;
+ processed_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder clearProcessed() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ processed_ = false;
+ onChanged();
+ return this;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.setUnknownFields(unknownFields);
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return super.mergeUnknownFields(unknownFields);
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.PingResponse)
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.PingResponse)
+ private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
+
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingResponse>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<PingResponse>() {
+ public PingResponse parsePartialFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return new PingResponse(input, extensionRegistry);
+ }
+ };
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingResponse> parser() {
+ return PARSER;
+ }
+
+ @java.lang.Override
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<PingResponse> getParserForType() {
+ return PARSER;
+ }
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse getDefaultInstanceForType() {
+ return DEFAULT_INSTANCE;
+ }
+
+ }
+
/**
* Protobuf service {@code hbase.pb.ClientService}
*/
@@ -40193,6 +41127,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done);
+ /**
+ * rpc Ping(.hbase.pb.PingRequest) returns (.hbase.pb.PingResponse);
+ */
+ public abstract void ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse> done);
+
}
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Service newReflectiveService(
@@ -40270,6 +41212,14 @@ public final class ClientProtos {
impl.multi(controller, request, done);
}
+ @java.lang.Override
+ public void ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse> done) {
+ impl.ping(controller, request, done);
+ }
+
};
}
@@ -40310,6 +41260,8 @@ public final class ClientProtos {
return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 8:
return impl.multi(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest)request);
+ case 9:
+ return impl.ping(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40342,6 +41294,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40374,6 +41328,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40454,6 +41410,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request,
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse> done);
+ /**
+ * rpc Ping(.hbase.pb.PingRequest) returns (.hbase.pb.PingResponse);
+ */
+ public abstract void ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse> done);
+
public static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -40521,6 +41485,11 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
+ case 9:
+ this.ping(controller, (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest)request,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40553,6 +41522,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40585,6 +41556,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -40740,6 +41713,21 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.class,
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
}
+
+ public void ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance(),
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.class,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance()));
+ }
}
public static BlockingInterface newBlockingStub(
@@ -40792,6 +41780,11 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest request)
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -40908,6 +41901,18 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
}
+
+ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse ping(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingRequest request)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance());
+ }
+
}
// @@protoc_insertion_point(class_scope:hbase.pb.ClientService)
@@ -41093,6 +42098,16 @@ public final class ClientProtos {
private static final
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_hbase_pb_MultiResponse_fieldAccessorTable;
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_PingRequest_descriptor;
+ private static final
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_hbase_pb_PingRequest_fieldAccessorTable;
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_PingResponse_descriptor;
+ private static final
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internal_static_hbase_pb_PingResponse_fieldAccessorTable;
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -41246,28 +42261,31 @@ public final class ClientProtos {
"dition\"\226\001\n\rMultiResponse\0228\n\022regionAction" +
"Result\030\001 \003(\0132\034.hbase.pb.RegionActionResu" +
"lt\022\021\n\tprocessed\030\002 \001(\010\0228\n\020regionStatistic" +
- "s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats*" +
- "\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\001" +
- "2\263\005\n\rClientService\0222\n\003Get\022\024.hbase.pb.Get" +
- "Request\032\025.hbase.pb.GetResponse\022;\n\006Mutate" +
- "\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.Mut" +
- "ateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReque",
- "st\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoadHF" +
- "ile\022\036.hbase.pb.BulkLoadHFileRequest\032\037.hb" +
- "ase.pb.BulkLoadHFileResponse\022V\n\017PrepareB" +
- "ulkLoad\022 .hbase.pb.PrepareBulkLoadReques" +
- "t\032!.hbase.pb.PrepareBulkLoadResponse\022V\n\017" +
- "CleanupBulkLoad\022 .hbase.pb.CleanupBulkLo" +
- "adRequest\032!.hbase.pb.CleanupBulkLoadResp" +
- "onse\022X\n\013ExecService\022#.hbase.pb.Coprocess" +
- "orServiceRequest\032$.hbase.pb.CoprocessorS" +
- "erviceResponse\022d\n\027ExecRegionServerServic",
- "e\022#.hbase.pb.CoprocessorServiceRequest\032$" +
- ".hbase.pb.CoprocessorServiceResponse\0228\n\005" +
- "Multi\022\026.hbase.pb.MultiRequest\032\027.hbase.pb" +
- ".MultiResponseBI\n1org.apache.hadoop.hbas" +
- "e.shaded.protobuf.generatedB\014ClientProto" +
- "sH\001\210\001\001\240\001\001"
+ "s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats\"" +
+ " \n\013PingRequest\022\021\n\tprocessed\030\001 \001(\010\"!\n\014Pin" +
+ "gResponse\022\021\n\tprocessed\030\001 \001(\010*\'\n\013Consiste" +
+ "ncy\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\352\005\n\rClient" +
+ "Service\0222\n\003Get\022\024.hbase.pb.GetRequest\032\025.h" +
+ "base.pb.GetResponse\022;\n\006Mutate\022\027.hbase.pb",
+ ".MutateRequest\032\030.hbase.pb.MutateResponse" +
+ "\0225\n\004Scan\022\025.hbase.pb.ScanRequest\032\026.hbase." +
+ "pb.ScanResponse\022P\n\rBulkLoadHFile\022\036.hbase" +
+ ".pb.BulkLoadHFileRequest\032\037.hbase.pb.Bulk" +
+ "LoadHFileResponse\022V\n\017PrepareBulkLoad\022 .h" +
+ "base.pb.PrepareBulkLoadRequest\032!.hbase.p" +
+ "b.PrepareBulkLoadResponse\022V\n\017CleanupBulk" +
+ "Load\022 .hbase.pb.CleanupBulkLoadRequest\032!" +
+ ".hbase.pb.CleanupBulkLoadResponse\022X\n\013Exe" +
+ "cService\022#.hbase.pb.CoprocessorServiceRe",
+ "quest\032$.hbase.pb.CoprocessorServiceRespo" +
+ "nse\022d\n\027ExecRegionServerService\022#.hbase.p" +
+ "b.CoprocessorServiceRequest\032$.hbase.pb.C" +
+ "oprocessorServiceResponse\0228\n\005Multi\022\026.hba" +
+ "se.pb.MultiRequest\032\027.hbase.pb.MultiRespo" +
+ "nse\0225\n\004Ping\022\025.hbase.pb.PingRequest\032\026.hba" +
+ "se.pb.PingResponseBI\n1org.apache.hadoop." +
+ "hbase.shaded.protobuf.generatedB\014ClientP" +
+ "rotosH\001\210\001\001\240\001\001"
};
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -41502,6 +42520,18 @@ public final class ClientProtos {
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_hbase_pb_MultiResponse_descriptor,
new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", });
+ internal_static_hbase_pb_PingRequest_descriptor =
+ getDescriptor().getMessageTypes().get(33);
+ internal_static_hbase_pb_PingRequest_fieldAccessorTable = new
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_hbase_pb_PingRequest_descriptor,
+ new java.lang.String[] { "Processed", });
+ internal_static_hbase_pb_PingResponse_descriptor =
+ getDescriptor().getMessageTypes().get(34);
+ internal_static_hbase_pb_PingResponse_fieldAccessorTable = new
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ internal_static_hbase_pb_PingResponse_descriptor,
+ new java.lang.String[] { "Processed", });
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.getDescriptor();
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.getDescriptor();
org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.getDescriptor();
diff --git a/hbase-protocol-shaded/src/main/protobuf/Client.proto b/hbase-protocol-shaded/src/main/protobuf/Client.proto
index 82bfb70..89c51c0 100644
--- a/hbase-protocol-shaded/src/main/protobuf/Client.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/Client.proto
@@ -499,6 +499,13 @@ message MultiResponse {
optional MultiRegionLoadStats regionStatistics = 3;
}
+message PingRequest {
+ optional bool processed = 1;
+}
+
+message PingResponse {
+ optional bool processed = 1;
+}
service ClientService {
rpc Get(GetRequest)
@@ -527,4 +534,7 @@ service ClientService {
rpc Multi(MultiRequest)
returns(MultiResponse);
+
+ rpc Ping(PingRequest)
+ returns(PingResponse);
}
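
On the server side, an implementation of the new method mirrors the test stub above: build a PingResponse, mark it processed, and hand it to the callback. A minimal sketch against the generated non-blocking ClientService.Interface (class placement and error handling are assumptions):

    // Minimal server-side sketch for the new Ping RPC, assuming the
    // generated ClientService.Interface contract from this patch.
    @Override
    public void ping(RpcController controller,
        ClientProtos.PingRequest request,
        RpcCallback<ClientProtos.PingResponse> done) {
      ClientProtos.PingResponse response =
          ClientProtos.PingResponse.newBuilder()
              .setProcessed(true)  // echo back that the request was handled
              .build();
      done.run(response);
    }
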
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 1bb57c4..976bedb 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -39188,6 +39188,874 @@ public final class ClientProtos {
// @@protoc_insertion_point(class_scope:hbase.pb.MultiResponse)
}
+ public interface PingRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional bool processed = 1;
+ /**
+ * optional bool processed = 1;
+ */
+ boolean hasProcessed();
+ /**
+ * optional bool processed = 1;
+ */
+ boolean getProcessed();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingRequest}
+ */
+ public static final class PingRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements PingRequestOrBuilder {
+ // Use PingRequest.newBuilder() to construct.
+ private PingRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private PingRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final PingRequest defaultInstance;
+ public static PingRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public PingRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private PingRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ processed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<PingRequest> PARSER =
+ new com.google.protobuf.AbstractParser<PingRequest>() {
+ public PingRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new PingRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<PingRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional bool processed = 1;
+ public static final int PROCESSED_FIELD_NUMBER = 1;
+ private boolean processed_;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+
+ private void initFields() {
+ processed_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, processed_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, processed_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest) obj;
+
+ boolean result = true;
+ result = result && (hasProcessed() == other.hasProcessed());
+ if (hasProcessed()) {
+ result = result && (getProcessed()
+ == other.getProcessed());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasProcessed()) {
+ hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getProcessed());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ processed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.processed_ = processed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance()) return this;
+ if (other.hasProcessed()) {
+ setProcessed(other.getProcessed());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional bool processed = 1;
+ private boolean processed_ ;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder setProcessed(boolean value) {
+ bitField0_ |= 0x00000001;
+ processed_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder clearProcessed() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ processed_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.PingRequest)
+ }
+
+ static {
+ defaultInstance = new PingRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.PingRequest)
+ }
+
+ public interface PingResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional bool processed = 1;
+ /**
+ * optional bool processed = 1;
+ */
+ boolean hasProcessed();
+ /**
+ * optional bool processed = 1;
+ */
+ boolean getProcessed();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingResponse}
+ */
+ public static final class PingResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements PingResponseOrBuilder {
+ // Use PingResponse.newBuilder() to construct.
+ private PingResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private PingResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final PingResponse defaultInstance;
+ public static PingResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public PingResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private PingResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ processed_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<PingResponse> PARSER =
+ new com.google.protobuf.AbstractParser<PingResponse>() {
+ public PingResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new PingResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<PingResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional bool processed = 1;
+ public static final int PROCESSED_FIELD_NUMBER = 1;
+ private boolean processed_;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+
+ private void initFields() {
+ processed_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, processed_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, processed_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse) obj;
+
+ boolean result = true;
+ result = result && (hasProcessed() == other.hasProcessed());
+ if (hasProcessed()) {
+ result = result && (getProcessed()
+ == other.getProcessed());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasProcessed()) {
+ hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getProcessed());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.PingResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ processed_ = false;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_hbase_pb_PingResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.processed_ = processed_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance()) return this;
+ if (other.hasProcessed()) {
+ setProcessed(other.getProcessed());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional bool processed = 1;
+ private boolean processed_ ;
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean hasProcessed() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public boolean getProcessed() {
+ return processed_;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder setProcessed(boolean value) {
+ bitField0_ |= 0x00000001;
+ processed_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool processed = 1;
+ */
+ public Builder clearProcessed() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ processed_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.PingResponse)
+ }
+
+ static {
+ defaultInstance = new PingResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.PingResponse)
+ }
+
/**
* Protobuf service {@code hbase.pb.ClientService}
*/
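The two message classes above expose the usual optional-bool API: setProcessed() flips the presence bit, hasProcessed() tests it, and the PARSER/parseFrom paths rebuild the message from the wire. A minimal round-trip sketch, assuming only the regenerated ClientProtos on the classpath (illustrative only, not part of the patch):

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class PingMessageRoundTrip {
      public static void main(String[] args) throws Exception {
        // setProcessed(true) sets the low bit of bitField0_, so hasProcessed() becomes true.
        ClientProtos.PingRequest request =
            ClientProtos.PingRequest.newBuilder().setProcessed(true).build();
        // On the wire: one varint tag (0x08) plus one bool byte.
        byte[] wire = request.toByteArray();
        // parseFrom delegates to the generated PARSER above.
        ClientProtos.PingRequest reparsed = ClientProtos.PingRequest.parseFrom(wire);
        System.out.println(reparsed.hasProcessed() + " / " + reparsed.getProcessed());
      }
    }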
@@ -39268,6 +40136,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
+ /**
+ * rpc Ping(.hbase.pb.PingRequest) returns (.hbase.pb.PingResponse);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse> done);
+
}
public static com.google.protobuf.Service newReflectiveService(
@@ -39345,6 +40221,14 @@ public final class ClientProtos {
impl.multi(controller, request, done);
}
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse> done) {
+ impl.ping(controller, request, done);
+ }
+
};
}
@@ -39385,6 +40269,8 @@ public final class ClientProtos {
return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
case 8:
return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
+ case 9:
+ return impl.ping(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39417,6 +40303,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39449,6 +40337,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39529,6 +40419,14 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
+ /**
+ * rpc Ping(.hbase.pb.PingRequest) returns (.hbase.pb.PingResponse);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse> done);
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -39596,6 +40494,11 @@ public final class ClientProtos {
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
done));
return;
+ case 9:
+ this.ping(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse>specializeCallback(
+ done));
+ return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39628,6 +40531,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39660,6 +40565,8 @@ public final class ClientProtos {
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
case 8:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -39815,6 +40722,21 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
}
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance()));
+ }
}
public static BlockingInterface newBlockingStub(
@@ -39867,6 +40789,11 @@ public final class ClientProtos {
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request)
+ throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -39983,6 +40910,18 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
}
+
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PingResponse.getDefaultInstance());
+ }
+
}
// @@protoc_insertion_point(class_scope:hbase.pb.ClientService)
@@ -40168,6 +41107,16 @@ public final class ClientProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MultiResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_PingRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_PingRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_PingResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_PingResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -40321,28 +41270,31 @@ public final class ClientProtos {
"dition\"\226\001\n\rMultiResponse\0228\n\022regionAction" +
"Result\030\001 \003(\0132\034.hbase.pb.RegionActionResu" +
"lt\022\021\n\tprocessed\030\002 \001(\010\0228\n\020regionStatistic" +
- "s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats*" +
- "\'\n\013Consistency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\001" +
- "2\263\005\n\rClientService\0222\n\003Get\022\024.hbase.pb.Get" +
- "Request\032\025.hbase.pb.GetResponse\022;\n\006Mutate" +
- "\022\027.hbase.pb.MutateRequest\032\030.hbase.pb.Mut" +
- "ateResponse\0225\n\004Scan\022\025.hbase.pb.ScanReque",
- "st\032\026.hbase.pb.ScanResponse\022P\n\rBulkLoadHF" +
- "ile\022\036.hbase.pb.BulkLoadHFileRequest\032\037.hb" +
- "ase.pb.BulkLoadHFileResponse\022V\n\017PrepareB" +
- "ulkLoad\022 .hbase.pb.PrepareBulkLoadReques" +
- "t\032!.hbase.pb.PrepareBulkLoadResponse\022V\n\017" +
- "CleanupBulkLoad\022 .hbase.pb.CleanupBulkLo" +
- "adRequest\032!.hbase.pb.CleanupBulkLoadResp" +
- "onse\022X\n\013ExecService\022#.hbase.pb.Coprocess" +
- "orServiceRequest\032$.hbase.pb.CoprocessorS" +
- "erviceResponse\022d\n\027ExecRegionServerServic",
- "e\022#.hbase.pb.CoprocessorServiceRequest\032$" +
- ".hbase.pb.CoprocessorServiceResponse\0228\n\005" +
- "Multi\022\026.hbase.pb.MultiRequest\032\027.hbase.pb" +
- ".MultiResponseBB\n*org.apache.hadoop.hbas" +
- "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" +
- "\001\001"
+ "s\030\003 \001(\0132\036.hbase.pb.MultiRegionLoadStats\"" +
+ " \n\013PingRequest\022\021\n\tprocessed\030\001 \001(\010\"!\n\014Pin" +
+ "gResponse\022\021\n\tprocessed\030\001 \001(\010*\'\n\013Consiste" +
+ "ncy\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\352\005\n\rClient" +
+ "Service\0222\n\003Get\022\024.hbase.pb.GetRequest\032\025.h" +
+ "base.pb.GetResponse\022;\n\006Mutate\022\027.hbase.pb",
+ ".MutateRequest\032\030.hbase.pb.MutateResponse" +
+ "\0225\n\004Scan\022\025.hbase.pb.ScanRequest\032\026.hbase." +
+ "pb.ScanResponse\022P\n\rBulkLoadHFile\022\036.hbase" +
+ ".pb.BulkLoadHFileRequest\032\037.hbase.pb.Bulk" +
+ "LoadHFileResponse\022V\n\017PrepareBulkLoad\022 .h" +
+ "base.pb.PrepareBulkLoadRequest\032!.hbase.p" +
+ "b.PrepareBulkLoadResponse\022V\n\017CleanupBulk" +
+ "Load\022 .hbase.pb.CleanupBulkLoadRequest\032!" +
+ ".hbase.pb.CleanupBulkLoadResponse\022X\n\013Exe" +
+ "cService\022#.hbase.pb.CoprocessorServiceRe",
+ "quest\032$.hbase.pb.CoprocessorServiceRespo" +
+ "nse\022d\n\027ExecRegionServerService\022#.hbase.p" +
+ "b.CoprocessorServiceRequest\032$.hbase.pb.C" +
+ "oprocessorServiceResponse\0228\n\005Multi\022\026.hba" +
+ "se.pb.MultiRequest\032\027.hbase.pb.MultiRespo" +
+ "nse\0225\n\004Ping\022\025.hbase.pb.PingRequest\032\026.hba" +
+ "se.pb.PingResponseBB\n*org.apache.hadoop." +
+ "hbase.protobuf.generatedB\014ClientProtosH\001" +
+ "\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -40565,6 +41517,18 @@ public final class ClientProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MultiResponse_descriptor,
new java.lang.String[] { "RegionActionResult", "Processed", "RegionStatistics", });
+ internal_static_hbase_pb_PingRequest_descriptor =
+ getDescriptor().getMessageTypes().get(33);
+ internal_static_hbase_pb_PingRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_PingRequest_descriptor,
+ new java.lang.String[] { "Processed", });
+ internal_static_hbase_pb_PingResponse_descriptor =
+ getDescriptor().getMessageTypes().get(34);
+ internal_static_hbase_pb_PingResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_PingResponse_descriptor,
+ new java.lang.String[] { "Processed", });
return null;
}
};
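With the descriptor wiring above in place, Ping is method index 9 on ClientService and reachable through both generated stubs. A hedged sketch of the non-blocking path; channel and controller are assumptions supplied by whatever RPC layer the caller already has, not anything this patch defines:

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcChannel;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class AsyncPingSketch {
      static void pingAsync(RpcChannel channel, RpcController controller) {
        ClientProtos.ClientService.Stub stub =
            ClientProtos.ClientService.newStub(channel);
        stub.ping(controller, ClientProtos.PingRequest.getDefaultInstance(),
            new RpcCallback<ClientProtos.PingResponse>() {
              @Override
              public void run(ClientProtos.PingResponse response) {
                // Every server implementation in this patch replies processed=true.
                System.out.println("ping processed: " + response.getProcessed());
              }
            });
      }
    }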
diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto
index bbb2c45..bed99f0 100644
--- a/hbase-protocol/src/main/protobuf/Client.proto
+++ b/hbase-protocol/src/main/protobuf/Client.proto
@@ -499,6 +499,13 @@ message MultiResponse {
optional MultiRegionLoadStats regionStatistics = 3;
}
+message PingRequest {
+ optional bool processed = 1;
+}
+
+message PingResponse {
+ optional bool processed = 1;
+}
service ClientService {
rpc Get(GetRequest)
@@ -527,4 +534,7 @@ service ClientService {
rpc Multi(MultiRequest)
returns(MultiResponse);
+
+ rpc Ping(PingRequest)
+ returns(PingResponse);
}
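These two message definitions and one rpc entry are the source of truth; the generated Java above follows from rerunning protoc. The blocking call shape matches the BlockingInterface added earlier; a sketch, with blockingChannel assumed to come from the caller's environment:

    import com.google.protobuf.BlockingRpcChannel;
    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    public class BlockingPingSketch {
      static boolean pingOnce(BlockingRpcChannel blockingChannel) throws ServiceException {
        ClientProtos.ClientService.BlockingInterface stub =
            ClientProtos.ClientService.newBlockingStub(blockingChannel);
        ClientProtos.PingResponse response =
            stub.ping(null, ClientProtos.PingRequest.newBuilder().build());
        return response.hasProcessed() && response.getProcessed();
      }
    }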
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 7312852..83a20e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -3161,6 +3161,14 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
}
@Override
+ public ClientProtos.PingResponse ping(RpcController controller, ClientProtos.PingRequest request)
+ throws ServiceException {
+ ClientProtos.PingResponse.Builder pingResponse = ClientProtos.PingResponse.newBuilder();
+ pingResponse.setProcessed(true);
+ return pingResponse.build();
+ }
+
+ @Override
public CoprocessorServiceResponse execRegionServerService(RpcController controller,
CoprocessorServiceRequest request) throws ServiceException {
return regionServer.execRegionServerService(controller, request);
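The ping() implementation above ignores both arguments and unconditionally reports processed=true, so its contract is trivial to assert in isolation. A hypothetical test sketch (rpcServices is an assumed, already-constructed RSRpcServices; nothing here exists in the patch):

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
    import org.apache.hadoop.hbase.regionserver.RSRpcServices;

    public class PingContractSketch {
      static void assertPingContract(RSRpcServices rpcServices) throws Exception {
        ClientProtos.PingResponse response =
            rpcServices.ping(null, ClientProtos.PingRequest.getDefaultInstance());
        // True by construction in this patch's implementation.
        if (!response.getProcessed()) {
          throw new AssertionError("ping should report processed=true");
        }
      }
    }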
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index b653e3f..59a30a1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -437,6 +437,14 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
}
@Override
+ public ClientProtos.PingResponse ping(RpcController controller, ClientProtos.PingRequest request)
+ throws ServiceException {
+ ClientProtos.PingResponse.Builder pingResponse = ClientProtos.PingResponse.newBuilder();
+ pingResponse.setProcessed(true);
+ return pingResponse.build();
+ }
+
+ @Override
public GetRegionInfoResponse getRegionInfo(RpcController controller,
GetRegionInfoRequest request) throws ServiceException {
GetRegionInfoResponse.Builder builder = GetRegionInfoResponse.newBuilder();