diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index d900c0a..90471b0 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -1110,6 +1110,14 @@ public final class RPCProtos { boolean hasTinfo(); org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo(); org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder(); + + // required string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // optional string requestClassName = 4; + boolean hasRequestClassName(); + String getRequestClassName(); } public static final class RpcRequestHeader extends com.google.protobuf.GeneratedMessage @@ -1163,9 +1171,75 @@ public final class RPCProtos { return tinfo_; } + // required string methodName = 3; + public static final int METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string requestClassName = 4; + public static final int REQUESTCLASSNAME_FIELD_NUMBER 
= 4; + private java.lang.Object requestClassName_; + public boolean hasRequestClassName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getRequestClassName() { + java.lang.Object ref = requestClassName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + requestClassName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getRequestClassNameBytes() { + java.lang.Object ref = requestClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + requestClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private void initFields() { callId_ = 0; tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + methodName_ = ""; + requestClassName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -1176,6 +1250,10 @@ public final class RPCProtos { memoizedIsInitialized = 0; return false; } + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -1189,6 +1267,12 @@ public final class RPCProtos { if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, tinfo_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getRequestClassNameBytes()); + } getUnknownFields().writeTo(output); } @@ -1206,6 +1290,14 @@ public final class RPCProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, tinfo_); } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + 
if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getRequestClassNameBytes()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1239,6 +1331,16 @@ public final class RPCProtos { result = result && getTinfo() .equals(other.getTinfo()); } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && (hasRequestClassName() == other.hasRequestClassName()); + if (hasRequestClassName()) { + result = result && getRequestClassName() + .equals(other.getRequestClassName()); + } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1256,6 +1358,14 @@ public final class RPCProtos { hash = (37 * hash) + TINFO_FIELD_NUMBER; hash = (53 * hash) + getTinfo().hashCode(); } + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (hasRequestClassName()) { + hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getRequestClassName().hashCode(); + } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } @@ -1381,6 +1491,10 @@ public final class RPCProtos { tinfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + requestClassName_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); return this; } @@ -1431,6 +1545,14 @@ public final class RPCProtos { } else { result.tinfo_ = tinfoBuilder_.build(); } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.requestClassName_ = requestClassName_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1453,6 
+1575,12 @@ public final class RPCProtos { if (other.hasTinfo()) { mergeTinfo(other.getTinfo()); } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (other.hasRequestClassName()) { + setRequestClassName(other.getRequestClassName()); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -1462,6 +1590,10 @@ public final class RPCProtos { return false; } + if (!hasMethodName()) { + + return false; + } return true; } @@ -1502,6 +1634,16 @@ public final class RPCProtos { setTinfo(subBuilder.buildPartial()); break; } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + requestClassName_ = input.readBytes(); + break; + } } } } @@ -1619,6 +1761,78 @@ public final class RPCProtos { return tinfoBuilder_; } + // required string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // optional string requestClassName = 4; + private java.lang.Object requestClassName_ = ""; + public boolean hasRequestClassName() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getRequestClassName() { + java.lang.Object 
ref = requestClassName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + requestClassName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setRequestClassName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + requestClassName_ = value; + onChanged(); + return this; + } + public Builder clearRequestClassName() { + bitField0_ = (bitField0_ & ~0x00000008); + requestClassName_ = getDefaultInstance().getRequestClassName(); + onChanged(); + return this; + } + void setRequestClassName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000008; + requestClassName_ = value; + onChanged(); + } + // @@protoc_insertion_point(builder_scope:RpcRequestHeader) } @@ -1630,1182 +1844,170 @@ public final class RPCProtos { // @@protoc_insertion_point(class_scope:RpcRequestHeader) } - public interface RpcRequestBodyOrBuilder + public interface RpcResponseHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required string methodName = 1; - boolean hasMethodName(); - String getMethodName(); + // required uint32 callId = 1; + boolean hasCallId(); + int getCallId(); - // optional bytes request = 2; - boolean hasRequest(); - com.google.protobuf.ByteString getRequest(); + // required .RpcResponseHeader.Status status = 2; + boolean hasStatus(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); - // optional string requestClassName = 4; - boolean hasRequestClassName(); - String getRequestClassName(); + // optional bytes responseBodyType = 3; + boolean hasResponseBodyType(); + com.google.protobuf.ByteString getResponseBodyType(); } - public static final class RpcRequestBody extends + public static final class RpcResponseHeader extends com.google.protobuf.GeneratedMessage - implements RpcRequestBodyOrBuilder { - // Use RpcRequestBody.newBuilder() to construct. 
- private RpcRequestBody(Builder builder) { + implements RpcResponseHeaderOrBuilder { + // Use RpcResponseHeader.newBuilder() to construct. + private RpcResponseHeader(Builder builder) { super(builder); } - private RpcRequestBody(boolean noInit) {} + private RpcResponseHeader(boolean noInit) {} - private static final RpcRequestBody defaultInstance; - public static RpcRequestBody getDefaultInstance() { + private static final RpcResponseHeader defaultInstance; + public static RpcResponseHeader getDefaultInstance() { return defaultInstance; } - public RpcRequestBody getDefaultInstanceForType() { + public RpcResponseHeader getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; } - private int bitField0_; - // required string methodName = 1; - public static final int METHODNAME_FIELD_NUMBER = 1; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; + public enum Status + implements com.google.protobuf.ProtocolMessageEnum { + SUCCESS(0, 0), + ERROR(1, 1), + FATAL(2, 
2), + ; + + public static final int SUCCESS_VALUE = 0; + public static final int ERROR_VALUE = 1; + public static final int FATAL_VALUE = 2; + + + public final int getNumber() { return value; } + + public static Status valueOf(int value) { + switch (value) { + case 0: return SUCCESS; + case 1: return ERROR; + case 2: return FATAL; + default: return null; } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes request = 2; - public static final int REQUEST_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString request_; - public boolean hasRequest() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - - // optional string requestClassName = 4; - public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4; - private java.lang.Object requestClassName_; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - requestClassName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getRequestClassNameBytes() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - requestClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { 
- methodName_ = ""; - request_ = com.google.protobuf.ByteString.EMPTY; - requestClassName_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, request_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, getRequestClassNameBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, request_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getRequestClassNameBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)) { - return 
super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) obj; - - boolean result = true; - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && (hasRequest() == other.hasRequest()); - if (hasRequest()) { - result = result && getRequest() - .equals(other.getRequest()); - } - result = result && (hasRequestClassName() == other.hasRequestClassName()); - if (hasRequestClassName()) { - result = result && getRequestClassName() - .equals(other.getRequestClassName()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMethodName()) { - hash = (37 * hash) + METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (hasRequest()) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequest().hashCode(); - } - if (hasRequestClassName()) { - hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; - hash = (53 * hash) + getRequestClassName().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - 
public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBodyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - methodName_ = ""; - bitField0_ = (bitField0_ & 
~0x00000001); - request_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - requestClassName_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.methodName_ = methodName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.request_ = request_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 
0x00000004; - } - result.requestClassName_ = requestClassName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance()) return this; - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (other.hasRequest()) { - setRequest(other.getRequest()); - } - if (other.hasRequestClassName()) { - setRequestClassName(other.getRequestClassName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasMethodName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - methodName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - request_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000004; - requestClassName_ = input.readBytes(); - 
break; - } - } - } - } - - private int bitField0_; - - // required string methodName = 1; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000001); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - } - - // optional bytes request = 2; - private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRequest() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - public Builder setRequest(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - request_ = value; - onChanged(); - return this; - } - public Builder clearRequest() { - bitField0_ = (bitField0_ & ~0x00000002); - request_ = getDefaultInstance().getRequest(); - onChanged(); - return this; - } - - // optional string requestClassName = 4; - private java.lang.Object requestClassName_ = ""; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) 
ref).toStringUtf8(); - requestClassName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setRequestClassName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - requestClassName_ = value; - onChanged(); - return this; - } - public Builder clearRequestClassName() { - bitField0_ = (bitField0_ & ~0x00000004); - requestClassName_ = getDefaultInstance().getRequestClassName(); - onChanged(); - return this; - } - void setRequestClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - requestClassName_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:RpcRequestBody) - } - - static { - defaultInstance = new RpcRequestBody(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcRequestBody) - } - - public interface RpcResponseHeaderOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint32 callId = 1; - boolean hasCallId(); - int getCallId(); - - // required .RpcResponseHeader.Status status = 2; - boolean hasStatus(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); - } - public static final class RpcResponseHeader extends - com.google.protobuf.GeneratedMessage - implements RpcResponseHeaderOrBuilder { - // Use RpcResponseHeader.newBuilder() to construct. 
- private RpcResponseHeader(Builder builder) { - super(builder); - } - private RpcResponseHeader(boolean noInit) {} - - private static final RpcResponseHeader defaultInstance; - public static RpcResponseHeader getDefaultInstance() { - return defaultInstance; - } - - public RpcResponseHeader getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; - } - - public enum Status - implements com.google.protobuf.ProtocolMessageEnum { - SUCCESS(0, 0), - ERROR(1, 1), - FATAL(2, 2), - ; - - public static final int SUCCESS_VALUE = 0; - public static final int ERROR_VALUE = 1; - public static final int FATAL_VALUE = 2; - - - public final int getNumber() { return value; } - - public static Status valueOf(int value) { - switch (value) { - case 0: return SUCCESS; - case 1: return ERROR; - case 2: return FATAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Status findValueByNumber(int number) { - return Status.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); - } - - private static final Status[] VALUES = { - SUCCESS, ERROR, FATAL, - }; - - public static Status valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Status(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) - } - - private int bitField0_; - // required uint32 callId = 1; - public static final int CALLID_FIELD_NUMBER = 1; - private int callId_; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - - // required .RpcResponseHeader.Status status = 2; - public static final int STATUS_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { - return status_; - } - - private void initFields() { - callId_ = 0; - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCallId()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasStatus()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - 
getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, status_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, status_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; - - boolean result = true; - result = result && (hasCallId() == other.hasCallId()); - if (hasCallId()) { - result = result && (getCallId() - == other.getCallId()); - } - result = result && (hasStatus() == other.hasStatus()); - if (hasStatus()) { - result = result && - (getStatus() == other.getStatus()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCallId()) { - hash = (37 * hash) + CALLID_FIELD_NUMBER; - 
hash = (53 * hash) + getCallId(); - } - if (hasStatus()) { - hash = (37 * hash) + STATUS_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getStatus()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - callId_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.callId_ = callId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.status_ = status_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; - if (other.hasCallId()) { - setCallId(other.getCallId()); - } - if (other.hasStatus()) { - setStatus(other.getStatus()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; } - public final boolean isInitialized() { - if (!hasCallId()) { - - return false; - } - if (!hasStatus()) { - - return false; - } - return true; + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - status_ = value; + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Status findValueByNumber(int number) { + return Status.valueOf(number); } - break; - } - } - } - } - - private int bitField0_; - - // required uint32 callId = 1; - private int callId_ ; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - public Builder setCallId(int value) { - bitField0_ |= 0x00000001; - callId_ = value; - onChanged(); - return this; - } - public Builder clearCallId() { - bitField0_ = (bitField0_ & ~0x00000001); - callId_ = 0; - onChanged(); - return this; - } + }; - // required .RpcResponseHeader.Status status = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 
0x00000002); + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { - return status_; + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); } - public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { - if (value == null) { - throw new NullPointerException(); + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); + } + + private static final Status[] VALUES = { + SUCCESS, ERROR, FATAL, + }; + + public static Status valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); } - bitField0_ |= 0x00000002; - status_ = value; - onChanged(); - return this; + return VALUES[desc.getIndex()]; } - public Builder clearStatus() { - bitField0_ = (bitField0_ & ~0x00000002); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - onChanged(); - return this; + + private final int index; + private final int value; + + private Status(int index, int value) { + this.index = index; + this.value = value; } - // @@protoc_insertion_point(builder_scope:RpcResponseHeader) - } - - static { - defaultInstance = new RpcResponseHeader(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RpcResponseHeader) - } - - public interface RpcResponseBodyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional bytes response = 1; - boolean hasResponse(); - com.google.protobuf.ByteString getResponse(); - } - 
public static final class RpcResponseBody extends - com.google.protobuf.GeneratedMessage - implements RpcResponseBodyOrBuilder { - // Use RpcResponseBody.newBuilder() to construct. - private RpcResponseBody(Builder builder) { - super(builder); + // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) } - private RpcResponseBody(boolean noInit) {} - private static final RpcResponseBody defaultInstance; - public static RpcResponseBody getDefaultInstance() { - return defaultInstance; + private int bitField0_; + // required uint32 callId = 1; + public static final int CALLID_FIELD_NUMBER = 1; + private int callId_; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); } - - public RpcResponseBody getDefaultInstanceForType() { - return defaultInstance; + public int getCallId() { + return callId_; } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + // required .RpcResponseHeader.Status status = 2; + public static final int STATUS_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; + public boolean hasStatus() { + return ((bitField0_ & 0x00000002) == 0x00000002); } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { + return status_; } - private int bitField0_; - // optional bytes response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString response_; - public boolean hasResponse() { - return ((bitField0_ & 0x00000001) == 0x00000001); + // optional bytes responseBodyType = 3; + public static final int 
RESPONSEBODYTYPE_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString responseBodyType_; + public boolean hasResponseBodyType() { + return ((bitField0_ & 0x00000004) == 0x00000004); } - public com.google.protobuf.ByteString getResponse() { - return response_; + public com.google.protobuf.ByteString getResponseBodyType() { + return responseBodyType_; } private void initFields() { - response_ = com.google.protobuf.ByteString.EMPTY; + callId_ = 0; + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + responseBodyType_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; + if (!hasCallId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasStatus()) { + memoizedIsInitialized = 0; + return false; + } memoizedIsInitialized = 1; return true; } @@ -2814,7 +2016,13 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, response_); + output.writeUInt32(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, status_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, responseBodyType_); } getUnknownFields().writeTo(output); } @@ -2827,7 +2035,15 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, response_); + .computeUInt32Size(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, status_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, responseBodyType_); } size += 
getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2846,16 +2062,26 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; boolean result = true; - result = result && (hasResponse() == other.hasResponse()); - if (hasResponse()) { - result = result && getResponse() - .equals(other.getResponse()); + result = result && (hasCallId() == other.hasCallId()); + if (hasCallId()) { + result = result && (getCallId() + == other.getCallId()); + } + result = result && (hasStatus() == other.hasStatus()); + if (hasStatus()) { + result = result && + (getStatus() == other.getStatus()); + } + result = result && (hasResponseBodyType() == other.hasResponseBodyType()); + if (hasResponseBodyType()) { + result = result && getResponseBodyType() + .equals(other.getResponseBodyType()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -2866,49 +2092,57 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResponse()) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponse().hashCode(); + if (hasCallId()) { + hash = (37 * hash) + CALLID_FIELD_NUMBER; + hash = (53 * hash) + getCallId(); + } + if (hasStatus()) { + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getStatus()); + } + if (hasResponseBodyType()) { + hash = (37 * hash) + RESPONSEBODYTYPE_FIELD_NUMBER; + hash = 
(53 * hash) + getResponseBodyType().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2917,7 +2151,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2928,12 +2162,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2943,7 +2177,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder 
newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2956,18 +2190,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBodyOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2986,8 +2220,12 @@ public final class RPCProtos { public Builder clear() { super.clear(); - response_ = com.google.protobuf.ByteString.EMPTY; + callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + bitField0_ = (bitField0_ & ~0x00000002); + responseBodyType_ = 
com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -2997,24 +2235,24 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3022,38 +2260,60 @@ public final class RPCProtos { return result; } - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.response_ = response_; + result.callId_ = callId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.status_ = status_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.responseBodyType_ = responseBodyType_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance()) return this; - if (other.hasResponse()) { - setResponse(other.getResponse()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; + if (other.hasCallId()) { + setCallId(other.getCallId()); + } + if (other.hasStatus()) { + setStatus(other.getStatus()); + } + if (other.hasResponseBodyType()) { + setResponseBodyType(other.getResponseBodyType()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { + if (!hasCallId()) { + + return false; + } + if (!hasStatus()) { + + return false; + } return true; } @@ -3080,9 +2340,25 @@ public final class RPCProtos { } break; } - case 10: { + case 8: { bitField0_ |= 0x00000001; - response_ = input.readBytes(); + callId_ = input.readUInt32(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + status_ = value; + } + break; + } + case 26: { + bitField0_ |= 0x00000004; + responseBodyType_ = input.readBytes(); break; } } @@ -3091,39 +2367,84 @@ public final class RPCProtos { private int bitField0_; - // optional bytes response = 1; - private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasResponse() { + // required uint32 callId = 1; + private int callId_ ; + public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public com.google.protobuf.ByteString getResponse() { - return response_; + public int getCallId() { + return callId_; + } + public Builder setCallId(int value) { + bitField0_ |= 0x00000001; + callId_ = value; + onChanged(); + return this; + } + public Builder clearCallId() { + bitField0_ = (bitField0_ & ~0x00000001); + callId_ = 0; + onChanged(); + return this; + } + + // required .RpcResponseHeader.Status status = 2; + 
private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + public boolean hasStatus() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { + return status_; + } + public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + status_ = value; + onChanged(); + return this; + } + public Builder clearStatus() { + bitField0_ = (bitField0_ & ~0x00000002); + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; + onChanged(); + return this; + } + + // optional bytes responseBodyType = 3; + private com.google.protobuf.ByteString responseBodyType_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasResponseBodyType() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getResponseBodyType() { + return responseBodyType_; } - public Builder setResponse(com.google.protobuf.ByteString value) { + public Builder setResponseBodyType(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000001; - response_ = value; + bitField0_ |= 0x00000004; + responseBodyType_ = value; onChanged(); return this; } - public Builder clearResponse() { - bitField0_ = (bitField0_ & ~0x00000001); - response_ = getDefaultInstance().getResponse(); + public Builder clearResponseBodyType() { + bitField0_ = (bitField0_ & ~0x00000004); + responseBodyType_ = getDefaultInstance().getResponseBodyType(); onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:RpcResponseBody) + // @@protoc_insertion_point(builder_scope:RpcResponseHeader) } static { - defaultInstance = new 
RpcResponseBody(true); + defaultInstance = new RpcResponseHeader(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcResponseBody) + // @@protoc_insertion_point(class_scope:RpcResponseHeader) } public interface RpcExceptionOrBuilder @@ -3661,21 +2982,11 @@ public final class RPCProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcRequestHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcRequestBody_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcRequestBody_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcResponseHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_RpcResponseHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcResponseBody_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcResponseBody_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor internal_static_RpcException_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable @@ -3694,17 +3005,16 @@ public final class RPCProtos { "\030\002 \001(\t\"w\n\020ConnectionHeader\022\"\n\010userInfo\030\001" + " \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" + ":-org.apache.hadoop.hbase.client.ClientP" + - "rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + - "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"O\n\016RpcRequ" + - "estBody\022\022\n\nmethodName\030\001 \002(\t\022\017\n\007request\030\002" + - " \001(\014\022\030\n\020requestClassName\030\004 \001(\t\"{\n\021RpcRes" + - "ponseHeader\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 ", - "\002(\0162\031.RpcResponseHeader.Status\"+\n\006Status" + - 
"\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017R" + - "pcResponseBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcE" + - "xception\022\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstack" + - "Trace\030\002 \001(\tB<\n*org.apache.hadoop.hbase.p" + - "rotobuf.generatedB\tRPCProtosH\001\240\001\001" + "rotocol\"j\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + + "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\022\022\n\nmethodN" + + "ame\030\003 \002(\t\022\030\n\020requestClassName\030\004 \001(\t\"\225\001\n\021" + + "RpcResponseHeader\022\016\n\006callId\030\001 \002(\r\022)\n\006sta" + + "tus\030\002 \002(\0162\031.RpcResponseHeader.Status\022\030\n\020", + "responseBodyType\030\003 \001(\014\"+\n\006Status\022\013\n\007SUCC" + + "ESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"9\n\014RpcExcept" + + "ion\022\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstackTrace" + + "\030\002 \001(\tB<\n*org.apache.hadoop.hbase.protob" + + "uf.generatedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -3732,35 +3042,19 @@ public final class RPCProtos { internal_static_RpcRequestHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcRequestHeader_descriptor, - new java.lang.String[] { "CallId", "Tinfo", }, + new java.lang.String[] { "CallId", "Tinfo", "MethodName", "RequestClassName", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.Builder.class); - internal_static_RpcRequestBody_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_RpcRequestBody_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - 
internal_static_RpcRequestBody_descriptor, - new java.lang.String[] { "MethodName", "Request", "RequestClassName", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class); internal_static_RpcResponseHeader_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(3); internal_static_RpcResponseHeader_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcResponseHeader_descriptor, - new java.lang.String[] { "CallId", "Status", }, + new java.lang.String[] { "CallId", "Status", "ResponseBodyType", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Builder.class); - internal_static_RpcResponseBody_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_RpcResponseBody_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcResponseBody_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.Builder.class); internal_static_RpcException_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(4); internal_static_RpcException_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcException_descriptor, diff --git a/hbase-protocol/src/main/protobuf/RPC.proto b/hbase-protocol/src/main/protobuf/RPC.proto index 90ec7ce..5689b33 100644 --- a/hbase-protocol/src/main/protobuf/RPC.proto +++ b/hbase-protocol/src/main/protobuf/RPC.proto @@ -29,15 +29,18 @@ * <"hrpc"-bytearray><'5'[byte]> * * For every RPC that the client makes it needs to send the following - * RpcRequestHeader and the 
RpcRequestBody. At the data level this looks like: + * RpcRequestHeader and the request body. At the data level this looks like: * + * length-of-serialized-request-body + length-of-varint32-of-serialized-request-body> * - * + * + * - * + * ]> * On a failure, the server's protobuf response looks like * * @@ -78,19 +81,8 @@ message RpcRequestHeader { /** Monotonically increasing callId, mostly to keep track of RPCs */ required uint32 callId = 1; optional RPCTInfo tinfo = 2; -} -/** - * The RPC request body - */ -message RpcRequestBody { /** Name of the RPC method */ - required string methodName = 1; - - /** Bytes corresponding to the client protobuf request. This is the actual - * bytes corresponding to the RPC request argument. - */ - optional bytes request = 2; - + required string methodName = 3; /** Some metainfo about the request. Helps us to treat RPCs with * different priorities. For now this is just the classname of the request * proto object. @@ -111,15 +103,10 @@ message RpcResponseHeader { FATAL = 2; } required Status status = 2; -} -/** - * The RPC response body - */ -message RpcResponseBody { - /** Optional response bytes. This is the actual bytes corresponding to the - * return value of the invoked RPC. - */ - optional bytes response = 1; + /** Optional blob of bytes that would signify what's coming in the response body. + * Is it a PB or is it a bunch of ByteBuffers (e.g. KeyValue objects). + */ + optional bytes responseBodyType = 3; } /** * At the RPC layer, this message is used to indicate diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientSideRpcRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientSideRpcRequest.java new file mode 100644 index 0000000..f874251 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientSideRpcRequest.java @@ -0,0 +1,40 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.ipc;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import com.google.protobuf.Message;
+
+/**
+ * Client-side holder for the pieces of an outbound RPC: the name of the
+ * method to invoke, the class name of the request proto, and the protobuf
+ * request argument that becomes the RPC request body.
+ *
+ * <p>Instances are immutable once constructed; callers (e.g. HBaseClient)
+ * only read these fields when serializing the request.
+ */
+@InterfaceAudience.Private
+class ClientSideRpcRequest {
+  /** Class name of the request proto object. Per RPC.proto this is
+   *  metainfo that helps the server treat RPCs with different priorities. */
+  final String requestClassName;
+  /** Name of the RPC method to invoke on the server. */
+  final String methodName;
+  /** The client protobuf request argument; serialized as the request body. */
+  final Message requestArg;
+
+  /**
+   * @param requestClassName class name of the request proto object
+   * @param methodName name of the RPC method
+   * @param requestArg the protobuf request argument
+   */
+  ClientSideRpcRequest(String requestClassName, String methodName, Message requestArg) {
+    this.methodName = methodName;
+    this.requestClassName = requestClassName;
+    this.requestArg = requestArg;
+  }
+
+  /**
+   * Static factory, named to mirror the server-side construction helper.
+   * @return a new immutable ClientSideRpcRequest
+   */
+  static ClientSideRpcRequest constructRpcRequest(String requestClassName, String methodName,
+      Message requestArg) {
+    return new ClientSideRpcRequest(requestClassName, methodName, requestArg);
+  }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
index c19a62d..e3a3873 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
@@ -58,13 +58,12 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException;
-import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg; import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo; import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; @@ -89,6 +88,7 @@ import org.apache.hadoop.security.token.TokenSelector; import org.cloudera.htrace.Span; import org.cloudera.htrace.Trace; +import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; import com.google.protobuf.Message.Builder; @@ -263,14 +263,14 @@ public class HBaseClient { /** A call waiting for a value. 
*/ protected class Call { final int id; // call id - final RpcRequestBody param; // rpc request object + final ClientSideRpcRequest rpcRequest; // rpc request object Message value; // value, null if error IOException error; // exception, null if value boolean done; // true when call is done long startTime; - protected Call(RpcRequestBody param) { - this.param = param; + protected Call(ClientSideRpcRequest rpcRequest) { + this.rpcRequest = rpcRequest; this.startTime = System.currentTimeMillis(); synchronized (HBaseClient.this) { this.id = counter++; @@ -947,7 +947,9 @@ public class HBaseClient { RpcRequestHeader.Builder headerBuilder = RPCProtos.RpcRequestHeader.newBuilder(); headerBuilder.setCallId(call.id); - + headerBuilder.setMethodName(call.rpcRequest.methodName); + headerBuilder.setRequestClassName(call.rpcRequest.requestClassName); + if (Trace.isTracing()) { Span s = Trace.currentTrace(); headerBuilder.setTinfo(RPCTInfo.newBuilder() @@ -959,13 +961,16 @@ public class HBaseClient { synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC RpcRequestHeader header = headerBuilder.build(); int serializedHeaderSize = header.getSerializedSize(); - int requestSerializedSize = call.param.getSerializedSize(); + int requestSerializedSize = call.rpcRequest.requestArg.getSerializedSize(); this.out.writeInt(serializedHeaderSize + CodedOutputStream.computeRawVarint32Size(serializedHeaderSize) + requestSerializedSize + CodedOutputStream.computeRawVarint32Size(requestSerializedSize)); header.writeDelimitedTo(this.out); - call.param.writeDelimitedTo(this.out); + CodedOutputStream cos = CodedOutputStream.newInstance(out,1); + cos.writeRawVarint32(call.rpcRequest.requestArg.getSerializedSize()); + call.rpcRequest.requestArg.writeTo(cos); + cos.flush(); this.out.flush(); } } catch(IOException e) { @@ -973,7 +978,6 @@ public class HBaseClient { } } - private Method getMethod(Class protocol, String methodName) { Method method = methodInstances.get(methodName); @@ -1024,11 
+1028,12 @@ public class HBaseClient { try { rpcResponseType = ProtobufRpcClientEngine.Invoker.getReturnProtoType( getMethod(remoteId.getProtocol(), - call.param.getMethodName())); + call.rpcRequest.methodName)); } catch (Exception e) { throw new RuntimeException(e); //local exception } Builder builder = rpcResponseType.newBuilderForType(); + CodedInputStream cin = CodedInputStream.newInstance(in); builder.mergeDelimitedFrom(in); Message value = builder.build(); // it's possible that this call may have been cleaned up due to a RPC @@ -1142,7 +1147,7 @@ public class HBaseClient { private final ParallelResults results; protected final int index; - public ParallelCall(RpcRequestBody param, ParallelResults results, int index) { + public ParallelCall(ClientSideRpcRequest param, ParallelResults results, int index) { super(param); this.results = results; this.index = index; @@ -1162,7 +1167,7 @@ public class HBaseClient { protected int count; public ParallelResults(int size) { - this.values = new RpcResponseBody[size]; + this.values = new EmptyMsg[size]; this.size = size; } @@ -1285,12 +1290,12 @@ public class HBaseClient { * @return Message * @throws IOException e */ - public Message call(RpcRequestBody param, InetSocketAddress address) + public Message call(ClientSideRpcRequest param, InetSocketAddress address) throws IOException, InterruptedException { return call(param, address, null, 0); } - public Message call(RpcRequestBody param, InetSocketAddress addr, + public Message call(ClientSideRpcRequest param, InetSocketAddress addr, User ticket, int rpcTimeout) throws IOException, InterruptedException { return call(param, addr, null, ticket, rpcTimeout); @@ -1301,7 +1306,7 @@ public class HBaseClient { * with the ticket credentials, returning the value. * Throws exceptions if there are network problems or if the remote code * threw an exception. 
*/ - public Message call(RpcRequestBody param, InetSocketAddress addr, + public Message call(ClientSideRpcRequest param, InetSocketAddress addr, Class protocol, User ticket, int rpcTimeout) throws InterruptedException, IOException { @@ -1372,11 +1377,11 @@ public class HBaseClient { * corresponding address. When all values are available, or have timed out * or errored, the collected results are returned in an array. The array * contains nulls for calls that timed out or errored. */ - public Message[] call(RpcRequestBody[] params, InetSocketAddress[] addresses, + public Message[] call(ClientSideRpcRequest[] params, InetSocketAddress[] addresses, Class protocol, User ticket) throws IOException, InterruptedException { - if (addresses.length == 0) return new RpcResponseBody[0]; + if (addresses.length == 0) return new EmptyMsg[0]; ParallelResults results = new ParallelResults(params.length); // TODO this synchronization block doesnt make any sense, we should possibly fix it @@ -1493,4 +1498,4 @@ public class HBaseClient { (ticket == null ? 
0 : ticket.hashCode()) )) ^ rpcTimeout; } } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java index 8b075fa..472e06a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java @@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.io.InputStream; import java.net.BindException; import java.net.InetAddress; import java.net.InetSocketAddress; @@ -72,7 +73,6 @@ import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status; @@ -85,10 +85,12 @@ import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.ByteBufferOutputStream; import org.apache.hadoop.io.BytesWritable; +import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.ipc.RPC.VersionMismatch; +import org.apache.hadoop.record.Buffer; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; @@ -100,6 +102,13 @@ import 
org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.StringUtils; + +import com.google.common.base.Function; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.protobuf.CodedInputStream; +import com.google.protobuf.CodedOutputStream; +import com.google.protobuf.Message; + import org.cliffc.high_scale_lib.Counter; import org.cloudera.htrace.Sampler; import org.cloudera.htrace.Span; @@ -309,7 +318,7 @@ public abstract class HBaseServer implements RpcServer { /** A call queued for handling. */ protected class Call implements RpcCallContext { protected int id; // the client's call id - protected RpcRequestBody rpcRequestBody; // the parameter passed + protected ServerSideRpcRequest rpcRequest; // the parameter passed protected Connection connection; // connection to client protected long timestamp; // the time received when response is null // the time served when response is not null @@ -322,10 +331,21 @@ public abstract class HBaseServer implements RpcServer { protected boolean isError; protected TraceInfo tinfo; - public Call(int id, RpcRequestBody rpcRequestBody, Connection connection, + public Call(ServerSideRpcRequest rpcRequest, Connection connection, + Responder responder, long size, TraceInfo tinfo) { + this.rpcRequest = rpcRequest; + this.id = rpcRequest.header.getCallId(); + setFields(connection, responder, size, tinfo); + } + + public Call(int id, Connection connection, Responder responder, long size, TraceInfo tinfo) { this.id = id; - this.rpcRequestBody = rpcRequestBody; + setFields(connection, responder, size, tinfo); + } + + private void setFields(Connection connection, Responder responder, + long size, TraceInfo tinfo) { this.connection = connection; this.timestamp = System.currentTimeMillis(); this.response = null; @@ -338,7 +358,7 @@ public abstract class HBaseServer 
implements RpcServer { @Override public String toString() { - return rpcRequestBody.toString() + " from " + connection.toString(); + return rpcRequest.toString() + " from " + connection.toString(); } protected synchronized void setSaslTokenResponse(ByteBuffer response) { @@ -373,7 +393,12 @@ public abstract class HBaseServer implements RpcServer { b.build().writeDelimitedTo(out); } else { if (value != null) { - ((Message)value).writeDelimitedTo(out); + //Wanted to turn off buffering in the CodedOutputStream but can't. + //If 0 is passed in the newInstance, IndexOutofBounds results + CodedOutputStream cos = CodedOutputStream.newInstance(out,1); + cos.writeRawVarint32(((Message)value).getSerializedSize()); + ((Message)value).writeTo(cos); + cos.flush(); } } if (connection.useWrap) { @@ -1126,12 +1151,12 @@ public abstract class HBaseServer implements RpcServer { // Fake 'call' for failed authorization response private static final int AUTHROIZATION_FAILED_CALLID = -1; private final Call authFailedCall = new Call(AUTHROIZATION_FAILED_CALLID, - null, this, null, 0, null); + this, null, 0, null); private ByteArrayOutputStream authFailedResponse = new ByteArrayOutputStream(); // Fake 'call' for SASL context setup private static final int SASL_CALLID = -33; - private final Call saslCall = new Call(SASL_CALLID, null, this, null, 0, + private final Call saslCall = new Call(SASL_CALLID, this, null, 0, null); public UserGroupInformation attemptingUser = null; // user name before auth @@ -1480,7 +1505,7 @@ public abstract class HBaseServer implements RpcServer { // we return 0 which will keep the socket up -- bad clients, unless // they switch to suit the running server -- will fail later doing // getProtocolVersion. 
- Call fakeCall = new Call(0, null, this, responder, 0, null); + Call fakeCall = new Call(0, this, responder, 0, null); // Versions 3 and greater can interpret this exception // response in the same manner setupResponse(buffer, fakeCall, Status.FATAL, @@ -1594,7 +1619,9 @@ public abstract class HBaseServer implements RpcServer { protected void processData(byte[] buf) throws IOException, InterruptedException { DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf)); + int totalLength = dis.available(); RpcRequestHeader request = RpcRequestHeader.parseDelimitedFrom(dis); + int lengthAfterHeaderRead = dis.available(); int id = request.getCallId(); long callSize = buf.length; @@ -1604,7 +1631,7 @@ public abstract class HBaseServer implements RpcServer { } // Enforcing the call queue size, this triggers a retry in the client if ((callSize + callQueueSize.get()) > maxQueueSize) { - final Call callTooBig = new Call(id, null, this, responder, callSize, + final Call callTooBig = new Call(id, this, responder, callSize, null); ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream(); setupResponse(responseBuffer, callTooBig, Status.FATAL, @@ -1613,14 +1640,14 @@ public abstract class HBaseServer implements RpcServer { responder.doRespond(callTooBig); return; } - - RpcRequestBody rpcRequestBody; + ServerSideRpcRequest rpcRequestBody; try { - rpcRequestBody = RpcRequestBody.parseDelimitedFrom(dis); + rpcRequestBody = ServerSideRpcRequest.constructRpcRequestOnServer(request, buf, + totalLength - lengthAfterHeaderRead, lengthAfterHeaderRead); } catch (Throwable t) { LOG.warn("Unable to read call parameters for client " + getHostAddress(), t); - final Call readParamsFailedCall = new Call(id, null, this, responder, + final Call readParamsFailedCall = new Call(id, this, responder, callSize, null); ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream(); @@ -1633,11 +1660,11 @@ public abstract class HBaseServer implements RpcServer { Call call; 
if (request.hasTinfo()) { - call = new Call(id, rpcRequestBody, this, responder, callSize, + call = new Call(rpcRequestBody, this, responder, callSize, new TraceInfo(request.getTinfo().getTraceId(), request.getTinfo() .getParentId())); } else { - call = new Call(id, rpcRequestBody, this, responder, callSize, null); + call = new Call(rpcRequestBody, this, responder, callSize, null); } callQueueSize.add(callSize); @@ -1754,8 +1781,7 @@ public abstract class HBaseServer implements RpcServer { call.connection.getRemotePort()); if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": has #" + call.id + " from " + - call.connection); + LOG.debug(getName() + ": has #" + call.id + " from " + call.connection); String errorClass = null; String error = null; @@ -1783,7 +1809,7 @@ public abstract class HBaseServer implements RpcServer { call.connection.protocol); // make the call - value = call(call.connection.protocol, call.rpcRequestBody, call.timestamp, + value = call(call.connection.protocol, call.rpcRequest, call.timestamp, status); } catch (Throwable e) { LOG.debug(getName()+", call "+call+": error: " + e, e); @@ -1837,7 +1863,7 @@ public abstract class HBaseServer implements RpcServer { } - private Function qosFunction = null; + private Function qosFunction = null; /** * Gets the QOS level for this call. 
If it is higher than the highPriorityLevel and there @@ -1846,11 +1872,11 @@ public abstract class HBaseServer implements RpcServer { * @param newFunc */ @Override - public void setQosFunction(Function newFunc) { + public void setQosFunction(Function newFunc) { qosFunction = newFunc; } - protected int getQosLevel(RpcRequestBody rpcRequestBody) { + protected int getQosLevel(ServerSideRpcRequest rpcRequestBody) { if (qosFunction == null) { return 0; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java index a320095..85c3222 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java @@ -33,7 +33,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.IpcProtocol; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.ipc.RemoteException; @@ -88,30 +87,6 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { this.rpcTimeout = rpcTimeout; } - private RpcRequestBody constructRpcRequest(Method method, - Object[] params) throws ServiceException { - RpcRequestBody rpcRequest; - RpcRequestBody.Builder builder = RpcRequestBody.newBuilder(); - builder.setMethodName(method.getName()); - Message param; - int length = params.length; - if (length == 2) { - // RpcController + Message in the method args - // (generated code from RPC bits in .proto files have RpcController) - param = (Message)params[1]; - } else if (length == 1) { // Message - param = (Message)params[0]; - } else { - throw new ServiceException("Too many parameters for request. 
Method: [" - + method.getName() + "]" + ", Expected: 2, Actual: " - + params.length); - } - builder.setRequestClassName(param.getClass().getName()); - builder.setRequest(param.toByteString()); - rpcRequest = builder.build(); - return rpcRequest; - } - /** * This is the client side invoker of RPC method. It only throws * ServiceException, since the invocation proxy expects only @@ -137,8 +112,21 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { if (LOG.isDebugEnabled()) { startTime = System.currentTimeMillis(); } - - RpcRequestBody rpcRequest = constructRpcRequest(method, args); + Message param; + int length = args.length; + if (length == 2) { + // RpcController + Message in the method args + // (generated code from RPC bits in .proto files have RpcController) + param = (Message)args[1]; + } else if (length == 1) { // Message + param = (Message)args[0]; + } else { + throw new ServiceException("Too many parameters for request. Method: [" + + method.getName() + "]" + ", Expected: 2, Actual: " + + args.length); + } + ClientSideRpcRequest rpcRequest = ClientSideRpcRequest.constructRpcRequest( + param.getClass().getName(), method.getName(), param); Message val = null; try { val = client.call(rpcRequest, address, protocol, ticket, rpcTimeout); @@ -177,4 +165,4 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { return protoType; } } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java index cf68497..03e6e00 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.security.HBasePolicyProvider; import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager; @@ -42,6 +41,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.codehaus.jackson.map.ObjectMapper; +import com.google.protobuf.CodedInputStream; import com.google.protobuf.Message; import com.google.protobuf.ServiceException; /** @@ -152,7 +152,7 @@ class ProtobufRpcServerEngine implements RpcServerEngine { * exception name and the stack trace are returned in the protobuf response. */ public Message call(Class protocol, - RpcRequestBody rpcRequest, long receiveTime, MonitoredRPCHandler status) + ServerSideRpcRequest rpcRequest, long receiveTime, MonitoredRPCHandler status) throws IOException { try { String methodName = rpcRequest.getMethodName(); @@ -175,13 +175,15 @@ class ProtobufRpcServerEngine implements RpcServerEngine { } status.setRPC(rpcRequest.getMethodName(), - new Object[]{rpcRequest.getRequest()}, receiveTime); - status.setRPCPacket(rpcRequest); + new Object[]{rpcRequest.buf}, receiveTime); + status.setRPCPacket(rpcRequest.buf); status.resume("Servicing call"); //get an instance of the method arg type Message protoType = getMethodArgType(method); - Message param = protoType.newBuilderForType() - .mergeFrom(rpcRequest.getRequest()).build(); + CodedInputStream cis = CodedInputStream.newInstance(rpcRequest.buf, + rpcRequest.offset, rpcRequest.length); + cis.setSizeLimit(cis.readRawVarint32()); + Message param = protoType.newBuilderForType().mergeFrom(cis).build(); Message result; Object impl = null; if (protocol.isAssignableFrom(this.implementation)) { @@ -234,7 +236,7 @@ class ProtobufRpcServerEngine implements RpcServerEngine { buffer.append("("); 
buffer.append(param.getClass().getName()); buffer.append(")"); - logResponse(new Object[]{rpcRequest.getRequest()}, + logResponse(new Object[]{rpcRequest}, methodName, buffer.toString(), (tooLarge ? "TooLarge" : "TooSlow"), status.getClient(), startTime, processingTime, qTime, responseSize); @@ -367,4 +369,4 @@ class ProtobufRpcServerEngine implements RpcServerEngine { LOG.info(v); } } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 6e244bd..4f82eff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -25,7 +25,6 @@ import com.google.protobuf.Message; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import java.io.IOException; import java.net.InetSocketAddress; @@ -46,19 +45,23 @@ public interface RpcServer { InetSocketAddress getListenerAddress(); - /** Called for each call. + /** Called for each call (TODO: this currently only returns a Message + * instance. But we should also support objects of other + * types (for e.g., ByteBuffer[]). We should define a class/interface that can + * handle both Message and maybe something like ByteBuffer[]. 
The only + * methods in the interface would be writeTo(OutputStream) and getSerializedLength() * @param param parameter * @param receiveTime time * @return Message Protobuf response Message * @throws java.io.IOException e */ Message call(Class protocol, - RpcRequestBody param, long receiveTime, MonitoredRPCHandler status) + ServerSideRpcRequest param, long receiveTime, MonitoredRPCHandler status) throws IOException; void setErrorHandler(HBaseRPCErrorHandler handler); - void setQosFunction(Function newFunc); + void setQosFunction(Function newFunc); void openServer(); @@ -68,4 +71,4 @@ public interface RpcServer { * Returns the metrics instance for reporting RPC call statistics */ MetricsHBaseServer getMetrics(); -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerSideRpcRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerSideRpcRequest.java new file mode 100644 index 0000000..f04f50c --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerSideRpcRequest.java @@ -0,0 +1,61 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.ipc; + +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; + +@InterfaceAudience.Private +public class ServerSideRpcRequest { + RpcRequestHeader header; + byte[] buf; + int offset; + int length; + + public ServerSideRpcRequest(RpcRequestHeader header, byte[] buf, int offset, int length) { + this.header = header; + this.buf = buf; + this.length = length; + this.offset = offset; + } + + public String getMethodName() { + return this.header.getMethodName(); + } + + public boolean hasRequestClassName() { + return header.hasRequestClassName(); + } + + public String getRequestClassName() { + return header.getRequestClassName(); + } + + public DataInputStream getRequestAsDataInputStream() { + return new DataInputStream(new ByteArrayInputStream(buf, offset, length)); + } + + public static ServerSideRpcRequest constructRpcRequestOnServer( + RpcRequestHeader header, byte[] buf, int offset, int length) { + return new ServerSideRpcRequest(header, buf, offset, length); + } +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java index 795b88c..5c433d2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.monitoring; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; /** * A MonitoredTask implementation optimized for use with RPC Handlers @@ -39,6 +38,6 @@ public interface MonitoredRPCHandler extends MonitoredTask { public abstract void setRPC(String 
methodName, Object [] params, long queueTime); - public abstract void setRPCPacket(RpcRequestBody param); + public abstract void setRPCPacket(byte[] buf); public abstract void setConnection(String clientAddress, int remotePort); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java index bb5b928..07fb5c5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.monitoring; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.io.WritableWithSize; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Writable; @@ -46,7 +45,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl private long rpcStartTime; private String methodName = ""; private Object [] params = {}; - private RpcRequestBody packet; + private byte[] packet; public MonitoredRPCHandlerImpl() { super(); @@ -141,7 +140,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl // no RPC is currently running, or we don't have an RPC's packet info return -1L; } - return packet.getSerializedSize(); + return packet.length; } /** @@ -199,10 +198,10 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl /** * Gives this instance a reference to the protobuf received by the RPC, so * that it can later compute its size if asked for it. 
- * @param param The protobuf received by the RPC for this call + * @param packet The protobuf request payload received by the RPC for this call */ - public void setRPCPacket(RpcRequestBody param) { - this.packet = param; + public void setRPCPacket(byte[] packet) { + this.packet = packet; } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 7af0d52..f7f08a2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -18,7 +18,9 @@ */ package org.apache.hadoop.hbase.regionserver; +import java.io.DataInputStream; import java.io.IOException; +import java.io.InputStream; import java.lang.Thread.UncaughtExceptionHandler; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -109,6 +111,7 @@ import org.apache.hadoop.hbase.ipc.HBaseServerRPC; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.ipc.ServerSideRpcRequest; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.ResponseConverter; @@ -166,7 +169,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest; import 
org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest; @@ -214,6 +216,7 @@ import org.cliffc.high_scale_lib.Counter; import com.google.common.base.Function; import com.google.protobuf.ByteString; +import com.google.protobuf.CodedInputStream; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; @@ -564,7 +567,7 @@ public class HRegionServer implements ClientProtocol, * Utility used ensuring higher quality of service for priority rpcs; e.g. * rpcs to .META. and -ROOT-, etc. */ - class QosFunction implements Function { + class QosFunction implements Function { private final Map annotatedQos; //We need to mock the regionserver instance for some unit tests (set via //setRegionServer method. @@ -629,7 +632,7 @@ public class HRegionServer implements ClientProtocol, argumentToClassMap.put(cls.getCanonicalName(), cls); try { methodMap.get("parseFrom").put(cls, - cls.getDeclaredMethod("parseFrom",ByteString.class)); + cls.getDeclaredMethod("parseFrom",CodedInputStream.class)); methodMap.get("getRegion").put(cls, cls.getDeclaredMethod("getRegion")); } catch (Exception e) { throw new RuntimeException(e); @@ -652,7 +655,7 @@ public class HRegionServer implements ClientProtocol, } @Override - public Integer apply(RpcRequestBody from) { + public Integer apply(ServerSideRpcRequest from) { String methodName = from.getMethodName(); Class rpcArgClass = null; if (from.hasRequestClassName()) { @@ -664,15 +667,23 @@ public class HRegionServer implements ClientProtocol, if (priorityByAnnotation != null) { return priorityByAnnotation; } - - if (rpcArgClass == null || from.getRequest().isEmpty()) { - return HConstants.NORMAL_QOS; + DataInputStream dis; + try { + if (rpcArgClass == null || + (dis = from.getRequestAsDataInputStream()).available() == 0) { + return HConstants.NORMAL_QOS; + } + } catch (IOException ie) { 
+ throw new RuntimeException(ie); } + //CodedInputStream cis = CodedInputStream.newInstance(dis);cis.setSizeLimit(cis.readRawVarint32()); Object deserializedRequestObj; //check whether the request has reference to Meta region try { + CodedInputStream cis = CodedInputStream.newInstance(dis);cis.setSizeLimit(cis.readRawVarint32()); + Method parseFrom = methodMap.get("parseFrom").get(rpcArgClass); - deserializedRequestObj = parseFrom.invoke(null, from.getRequest()); + deserializedRequestObj = parseFrom.invoke(null, cis); Method getRegion = methodMap.get("getRegion").get(rpcArgClass); RegionSpecifier regionSpecifier = (RegionSpecifier)getRegion.invoke(deserializedRequestObj, @@ -685,6 +696,7 @@ public class HRegionServer implements ClientProtocol, return HConstants.HIGH_QOS; } } catch (Exception ex) { + ex.printStackTrace(); throw new RuntimeException(ex); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java index f54350d..3b0accf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; @@ -60,9 +60,9 @@ public class TestIPC { @Override public Message call(Class protocol, - RpcRequestBody param, long receiveTime, MonitoredRPCHandler status) + ServerSideRpcRequest rpcRequest, long receiveTime, MonitoredRPCHandler status) throws IOException { - return param; + return 
EmptyMsg.getDefaultInstance(); } } @@ -88,11 +88,13 @@ public class TestIPC { InetSocketAddress address = rpcServer.getListenerAddress(); try { - client.call(RpcRequestBody.getDefaultInstance(), address, User.getCurrent(), 0); + ClientSideRpcRequest emptyReq = ClientSideRpcRequest.constructRpcRequest + ("", "", EmptyMsg.getDefaultInstance()); + client.call(emptyReq, address, User.getCurrent(), 0); fail("Expected an exception to have been thrown!"); } catch (Exception e) { LOG.info("Caught expected exception: " + e.toString()); assertTrue(StringUtils.stringifyException(e).contains("Injected fault")); } } -} \ No newline at end of file +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java index 82840c5..2119f17 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java @@ -28,13 +28,14 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.ipc.ServerSideRpcRequest; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; import 
org.apache.hadoop.hbase.regionserver.HRegionServer.QosFunction; import org.junit.BeforeClass; import org.junit.Test; @@ -42,6 +43,7 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.protobuf.ByteString; +import com.google.protobuf.CodedOutputStream; /** * Tests that verify certain RPCs get a higher QoS. */ @@ -59,12 +61,13 @@ public class TestPriorityRpc { @Test public void testQosFunctionForMeta() throws IOException { qosFunction = regionServer.getQosFunction(); - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); //create a rpc request that has references to META region and also //uses one of the known argument classes (known argument classes are //listed in HRegionServer.QosFunction.knownArgumentClasses) - rpcRequestBuilder = RpcRequestBody.newBuilder(); - rpcRequestBuilder.setMethodName("foo"); + RpcRequestHeader.Builder headerBuilder = RpcRequestHeader.newBuilder(); + headerBuilder.setCallId(0); + headerBuilder.setMethodName("foo"); + headerBuilder.setRequestClassName(GetRequest.class.getCanonicalName()); GetRequest.Builder getRequestBuilder = GetRequest.newBuilder(); RegionSpecifier.Builder regionSpecifierBuilder = RegionSpecifier.newBuilder(); @@ -76,10 +79,17 @@ public class TestPriorityRpc { getRequestBuilder.setRegion(regionSpecifier); Get.Builder getBuilder = Get.newBuilder(); getBuilder.setRow(ByteString.copyFrom("somerow".getBytes())); - getRequestBuilder.setGet(getBuilder.build()); - rpcRequestBuilder.setRequest(getRequestBuilder.build().toByteString()); - rpcRequestBuilder.setRequestClassName(GetRequest.class.getCanonicalName()); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); + getRequestBuilder.setGet(getBuilder.build()); + + GetRequest getRequest = getRequestBuilder.build(); + byte[] buf = new byte[CodedOutputStream.computeRawVarint32Size(getRequest.getSerializedSize()) + + getRequest.getSerializedSize()]; + CodedOutputStream cos = CodedOutputStream.newInstance(buf, 0, 
buf.length); + cos.writeRawVarint32(getRequest.getSerializedSize()); + getRequest.writeTo(cos); + ServerSideRpcRequest serverRpcRequest = new ServerSideRpcRequest( + headerBuilder.build(), buf, 0, buf.length); + HRegion mockRegion = Mockito.mock(HRegion.class); HRegionServer mockRS = Mockito.mock(HRegionServer.class); HRegionInfo mockRegionInfo = Mockito.mock(HRegionInfo.class); @@ -87,7 +97,7 @@ public class TestPriorityRpc { Mockito.when(mockRegion.getRegionInfo()).thenReturn(mockRegionInfo); Mockito.when(mockRegionInfo.isMetaRegion()).thenReturn(true); qosFunction.setRegionServer(mockRS); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.HIGH_QOS); + assertTrue (qosFunction.apply(serverRpcRequest) == HConstants.HIGH_QOS); } @Test @@ -96,33 +106,46 @@ public class TestPriorityRpc { //known argument classes (it uses one random request class) //(known argument classes are listed in //HRegionServer.QosFunction.knownArgumentClasses) - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); - rpcRequestBuilder.setMethodName("foo"); - rpcRequestBuilder.setRequestClassName(GetOnlineRegionRequest.class.getCanonicalName()); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); + RpcRequestHeader.Builder headerBuilder = RpcRequestHeader.newBuilder(); + headerBuilder.setCallId(0); + headerBuilder.setMethodName("foo"); + headerBuilder.setRequestClassName(GetOnlineRegionRequest.class.getCanonicalName()); + GetOnlineRegionRequest b = GetOnlineRegionRequest.newBuilder().build(); + ServerSideRpcRequest serverRpcRequest = new ServerSideRpcRequest( + headerBuilder.build(), b.toByteArray(), 0, b.getSerializedSize()); QosFunction qosFunc = regionServer.getQosFunction(); - assertTrue (qosFunc.apply(rpcRequest) == HConstants.NORMAL_QOS); + assertTrue (qosFunc.apply(serverRpcRequest) == HConstants.NORMAL_QOS); } @Test public void testQosFunctionForScanMethod() throws IOException { - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); - 
rpcRequestBuilder.setMethodName("scan"); + RpcRequestHeader.Builder headerBuilder = RpcRequestHeader.newBuilder(); + headerBuilder.setCallId(0); + headerBuilder.setMethodName("scan"); //build an empty scan request - ScanRequest.Builder scanBuilder = ScanRequest.newBuilder(); - ByteString requestBody = scanBuilder.build().toByteString(); - rpcRequestBuilder.setRequest(requestBody); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.NORMAL_QOS); + ScanRequest scanRequest = ScanRequest.newBuilder().build(); + byte[] buf = new byte[CodedOutputStream.computeRawVarint32Size(scanRequest.getSerializedSize()) + + scanRequest.getSerializedSize()]; + CodedOutputStream cos = CodedOutputStream.newInstance(buf, 0, buf.length); + cos.writeRawVarint32(scanRequest.getSerializedSize()); + scanRequest.writeTo(cos); + ServerSideRpcRequest serverRpcRequest = new ServerSideRpcRequest( + headerBuilder.build(), buf, 0, buf.length); + assertTrue (qosFunction.apply(serverRpcRequest) == HConstants.NORMAL_QOS); //build a scan request with scannerID - scanBuilder = ScanRequest.newBuilder(); + ScanRequest.Builder scanBuilder = ScanRequest.newBuilder(); scanBuilder.setScannerId(12345); - requestBody = scanBuilder.build().toByteString(); - rpcRequestBuilder.setRequest(requestBody); - rpcRequestBuilder.setRequestClassName(ScanRequest.class.getCanonicalName()); - rpcRequest = rpcRequestBuilder.build(); + scanRequest = scanBuilder.build(); + headerBuilder.setRequestClassName(ScanRequest.class.getCanonicalName()); + buf = new byte[CodedOutputStream.computeRawVarint32Size(scanRequest.getSerializedSize()) + + scanRequest.getSerializedSize()]; + cos = CodedOutputStream.newInstance(buf, 0, buf.length); + cos.writeRawVarint32(scanRequest.getSerializedSize()); + scanRequest.writeTo(cos); + serverRpcRequest = new ServerSideRpcRequest( + headerBuilder.build(), buf, 0, buf.length); //mock out a high priority type handling and see the QoS returned 
HRegionServer mockRS = Mockito.mock(HRegionServer.class); RegionScanner mockRegionScanner = Mockito.mock(RegionScanner.class); @@ -136,11 +159,11 @@ public class TestPriorityRpc { qosFunction.setRegionServer(mockRS); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.HIGH_QOS); + assertTrue (qosFunction.apply(serverRpcRequest) == HConstants.HIGH_QOS); //the same as above but with non-meta region Mockito.when(mockRegionInfo.isMetaRegion()).thenReturn(false); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.NORMAL_QOS); + assertTrue (qosFunction.apply(serverRpcRequest) == HConstants.NORMAL_QOS); } }