diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java index d900c0a..7a94658 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java @@ -1099,83 +1099,44 @@ public final class RPCProtos { // @@protoc_insertion_point(class_scope:ConnectionHeader) } - public interface RpcRequestHeaderOrBuilder + public interface EncodedDataBlockMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { - - // required uint32 callId = 1; - boolean hasCallId(); - int getCallId(); - - // optional .RPCTInfo tinfo = 2; - boolean hasTinfo(); - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo(); - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder(); } - public static final class RpcRequestHeader extends + public static final class EncodedDataBlockMeta extends com.google.protobuf.GeneratedMessage - implements RpcRequestHeaderOrBuilder { - // Use RpcRequestHeader.newBuilder() to construct. - private RpcRequestHeader(Builder builder) { + implements EncodedDataBlockMetaOrBuilder { + // Use EncodedDataBlockMeta.newBuilder() to construct. + private EncodedDataBlockMeta(Builder builder) { super(builder); } - private RpcRequestHeader(boolean noInit) {} + private EncodedDataBlockMeta(boolean noInit) {} - private static final RpcRequestHeader defaultInstance; - public static RpcRequestHeader getDefaultInstance() { + private static final EncodedDataBlockMeta defaultInstance; + public static EncodedDataBlockMeta getDefaultInstance() { return defaultInstance; } - public RpcRequestHeader getDefaultInstanceForType() { + public EncodedDataBlockMeta getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_EncodedDataBlockMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; - } - - private int bitField0_; - // required uint32 callId = 1; - public static final int CALLID_FIELD_NUMBER = 1; - private int callId_; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - - // optional .RPCTInfo tinfo = 2; - public static final int TINFO_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_; - public boolean hasTinfo() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { - return tinfo_; - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { - return tinfo_; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_EncodedDataBlockMeta_fieldAccessorTable; } private void initFields() { - callId_ = 0; - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = 
memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasCallId()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -1183,12 +1144,6 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, tinfo_); - } getUnknownFields().writeTo(output); } @@ -1198,14 +1153,6 @@ public final class RPCProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, tinfo_); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1223,22 +1170,12 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta) obj; boolean result = true; - result = result && (hasCallId() == other.hasCallId()); - if (hasCallId()) { - result = result && (getCallId() - == other.getCallId()); - } - result = result && (hasTinfo() == other.hasTinfo()); - if (hasTinfo()) { - result = result && getTinfo() - .equals(other.getTinfo()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1248,53 +1185,45 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCallId()) { - hash = (37 * hash) + CALLID_FIELD_NUMBER; - hash = (53 * hash) + getCallId(); - } - if (hasTinfo()) { - hash = (37 * hash) + TINFO_FIELD_NUMBER; - hash = (53 * hash) + getTinfo().hashCode(); - } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 
return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1303,7 +1232,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1314,12 +1243,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1329,7 +1258,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1342,18 +1271,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeaderOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder { public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_EncodedDataBlockMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_EncodedDataBlockMeta_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1364,7 +1293,6 @@ public final class RPCProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getTinfoFieldBuilder(); } } private static Builder create() { @@ -1373,14 +1301,6 @@ public final class RPCProtos { public Builder clear() { super.clear(); - callId_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - if (tinfoBuilder_ == null) { - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - } else { - tinfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -1390,24 +1310,24 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -1415,53 +1335,28 @@ public final class RPCProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader result = new 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.callId_ = callId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (tinfoBuilder_ == null) { - result.tinfo_ = tinfo_; - } else { - result.tinfo_ = tinfoBuilder_.build(); - } - result.bitField0_ = to_bitField0_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.getDefaultInstance()) return this; - if (other.hasCallId()) { - setCallId(other.getCallId()); - } - if (other.hasTinfo()) { - mergeTinfo(other.getTinfo()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasCallId()) { - - return false; - } return true; } @@ -1488,280 +1383,60 @@ public final class RPCProtos { } break; } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); - if (hasTinfo()) { - subBuilder.mergeFrom(getTinfo()); - } - input.readMessage(subBuilder, extensionRegistry); - setTinfo(subBuilder.buildPartial()); - break; - } } } } - private int bitField0_; - - // required uint32 callId = 1; - private int callId_ ; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - public Builder setCallId(int value) { - bitField0_ |= 0x00000001; - callId_ = value; - onChanged(); - return this; - } - public Builder clearCallId() { - bitField0_ = (bitField0_ & ~0x00000001); - callId_ = 0; - onChanged(); - return this; - } - - // optional .RPCTInfo tinfo = 2; - private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> tinfoBuilder_; - public boolean hasTinfo() { - return ((bitField0_ & 0x00000002) == 
0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTinfo() { - if (tinfoBuilder_ == null) { - return tinfo_; - } else { - return tinfoBuilder_.getMessage(); - } - } - public Builder setTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { - if (tinfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - tinfo_ = value; - onChanged(); - } else { - tinfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder setTinfo( - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { - if (tinfoBuilder_ == null) { - tinfo_ = builderForValue.build(); - onChanged(); - } else { - tinfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder mergeTinfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { - if (tinfoBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - tinfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) { - tinfo_ = - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(tinfo_).mergeFrom(value).buildPartial(); - } else { - tinfo_ = value; - } - onChanged(); - } else { - tinfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearTinfo() { - if (tinfoBuilder_ == null) { - tinfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); - onChanged(); - } else { - tinfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTinfoBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getTinfoFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTinfoOrBuilder() { - if (tinfoBuilder_ != null) { - return tinfoBuilder_.getMessageOrBuilder(); - } else { - return tinfo_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> - getTinfoFieldBuilder() { - if (tinfoBuilder_ == null) { - tinfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>( - tinfo_, - getParentForChildren(), - isClean()); - tinfo_ = null; - } - return tinfoBuilder_; - } - // @@protoc_insertion_point(builder_scope:RpcRequestHeader) + // @@protoc_insertion_point(builder_scope:EncodedDataBlockMeta) } static { - defaultInstance = new RpcRequestHeader(true); + defaultInstance = new EncodedDataBlockMeta(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcRequestHeader) + // @@protoc_insertion_point(class_scope:EncodedDataBlockMeta) } - public interface RpcRequestBodyOrBuilder + public interface RequestParamMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { - - // required string methodName = 1; - boolean hasMethodName(); - String getMethodName(); - - // optional bytes request = 2; - boolean hasRequest(); - com.google.protobuf.ByteString getRequest(); - - // optional string requestClassName = 4; - boolean hasRequestClassName(); - String 
getRequestClassName(); } - public static final class RpcRequestBody extends + public static final class RequestParamMeta extends com.google.protobuf.GeneratedMessage - implements RpcRequestBodyOrBuilder { - // Use RpcRequestBody.newBuilder() to construct. - private RpcRequestBody(Builder builder) { + implements RequestParamMetaOrBuilder { + // Use RequestParamMeta.newBuilder() to construct. + private RequestParamMeta(Builder builder) { super(builder); } - private RpcRequestBody(boolean noInit) {} + private RequestParamMeta(boolean noInit) {} - private static final RpcRequestBody defaultInstance; - public static RpcRequestBody getDefaultInstance() { + private static final RequestParamMeta defaultInstance; + public static RequestParamMeta getDefaultInstance() { return defaultInstance; } - public RpcRequestBody getDefaultInstanceForType() { + public RequestParamMeta getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestParamMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; - } - - private int bitField0_; - // required string methodName = 1; - public static final int METHODNAME_FIELD_NUMBER = 1; - private java.lang.Object methodName_; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String getMethodName() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - methodName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getMethodNameBytes() { - java.lang.Object ref = methodName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - methodName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional bytes request = 2; - public static final int REQUEST_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString request_; - public boolean hasRequest() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - - // optional string requestClassName = 4; - public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4; - private java.lang.Object requestClassName_; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - requestClassName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getRequestClassNameBytes() { - java.lang.Object ref = requestClassName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - requestClassName_ = b; - 
return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestParamMeta_fieldAccessorTable; } private void initFields() { - methodName_ = ""; - request_ = com.google.protobuf.ByteString.EMPTY; - requestClassName_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasMethodName()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -1769,15 +1444,6 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, request_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, getRequestClassNameBytes()); - } getUnknownFields().writeTo(output); } @@ -1787,18 +1453,6 @@ public final class RPCProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getMethodNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, request_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getRequestClassNameBytes()); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1816,27 +1470,12 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta) obj; boolean result = true; - result = result && (hasMethodName() == other.hasMethodName()); - if (hasMethodName()) { - result = result && getMethodName() - .equals(other.getMethodName()); - } - result = result && (hasRequest() == other.hasRequest()); - if (hasRequest()) { - result = result && getRequest() - .equals(other.getRequest()); - } - result = result && (hasRequestClassName() == other.hasRequestClassName()); - if (hasRequestClassName()) { - result = result && getRequestClassName() - .equals(other.getRequestClassName()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -1846,57 +1485,45 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasMethodName()) { - hash = (37 * hash) + METHODNAME_FIELD_NUMBER; - hash = (53 * hash) + getMethodName().hashCode(); - } - if (hasRequest()) { - hash = (37 * hash) + REQUEST_FIELD_NUMBER; - hash = (53 * hash) + getRequest().hashCode(); - } - if (hasRequestClassName()) { - hash = (37 * hash) + REQUESTCLASSNAME_FIELD_NUMBER; - hash = (53 * hash) + getRequestClassName().hashCode(); - } hash = (29 * hash) + 
getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1905,7 +1532,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1916,12 +1543,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1931,7 +1558,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -1944,18 +1571,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBodyOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestParamMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcRequestBody_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestParamMeta_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1974,12 +1601,6 @@ public final class RPCProtos { public Builder clear() { super.clear(); - methodName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - request_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - requestClassName_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -1989,24 +1610,24 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -2014,56 +1635,28 @@ public final class RPCProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.methodName_ = methodName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.request_ = request_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.requestClassName_ = requestClassName_; - result.bitField0_ = to_bitField0_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.getDefaultInstance()) return this; - if (other.hasMethodName()) { - setMethodName(other.getMethodName()); - } - if (other.hasRequest()) { - setRequest(other.getRequest()); - } - if (other.hasRequestClassName()) { - setRequestClassName(other.getRequestClassName()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasMethodName()) { - - return false; - } return true; } @@ -2090,283 +1683,60 @@ public final class RPCProtos { } break; } - case 10: { - bitField0_ |= 0x00000001; - methodName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - request_ = input.readBytes(); - break; - } - case 34: { - bitField0_ |= 0x00000004; - requestClassName_ = input.readBytes(); - break; - } } } } - private int bitField0_; - - // required string methodName = 1; - private java.lang.Object methodName_ = ""; - public boolean hasMethodName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public String 
getMethodName() { - java.lang.Object ref = methodName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - methodName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setMethodName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - return this; - } - public Builder clearMethodName() { - bitField0_ = (bitField0_ & ~0x00000001); - methodName_ = getDefaultInstance().getMethodName(); - onChanged(); - return this; - } - void setMethodName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - methodName_ = value; - onChanged(); - } - - // optional bytes request = 2; - private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasRequest() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getRequest() { - return request_; - } - public Builder setRequest(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - request_ = value; - onChanged(); - return this; - } - public Builder clearRequest() { - bitField0_ = (bitField0_ & ~0x00000002); - request_ = getDefaultInstance().getRequest(); - onChanged(); - return this; - } - - // optional string requestClassName = 4; - private java.lang.Object requestClassName_ = ""; - public boolean hasRequestClassName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public String getRequestClassName() { - java.lang.Object ref = requestClassName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - requestClassName_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setRequestClassName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - requestClassName_ = value; - onChanged(); - return this; - } - public Builder clearRequestClassName() { - bitField0_ = (bitField0_ & ~0x00000004); - requestClassName_ = getDefaultInstance().getRequestClassName(); - onChanged(); - return this; - } - void setRequestClassName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000004; - requestClassName_ = value; - onChanged(); - } - // @@protoc_insertion_point(builder_scope:RpcRequestBody) + // @@protoc_insertion_point(builder_scope:RequestParamMeta) } static { - defaultInstance = new RpcRequestBody(true); + defaultInstance = new RequestParamMeta(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcRequestBody) + // @@protoc_insertion_point(class_scope:RequestParamMeta) } - public interface RpcResponseHeaderOrBuilder + public interface ResponseMetaOrBuilder extends com.google.protobuf.MessageOrBuilder { - - // required uint32 callId = 1; - boolean hasCallId(); - int getCallId(); - - // required .RpcResponseHeader.Status status = 2; - boolean hasStatus(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus(); } - public static final class RpcResponseHeader extends + public static final class ResponseMeta extends com.google.protobuf.GeneratedMessage - implements RpcResponseHeaderOrBuilder { - // Use RpcResponseHeader.newBuilder() to construct. - private RpcResponseHeader(Builder builder) { + implements ResponseMetaOrBuilder { + // Use ResponseMeta.newBuilder() to construct. 
+ private ResponseMeta(Builder builder) { super(builder); } - private RpcResponseHeader(boolean noInit) {} + private ResponseMeta(boolean noInit) {} - private static final RpcResponseHeader defaultInstance; - public static RpcResponseHeader getDefaultInstance() { + private static final ResponseMeta defaultInstance; + public static ResponseMeta getDefaultInstance() { return defaultInstance; } - public RpcResponseHeader getDefaultInstanceForType() { + public ResponseMeta getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; - } - - public enum Status - implements com.google.protobuf.ProtocolMessageEnum { - SUCCESS(0, 0), - ERROR(1, 1), - FATAL(2, 2), - ; - - public static final int SUCCESS_VALUE = 0; - public static final int ERROR_VALUE = 1; - public static final int FATAL_VALUE = 2; - - - public final int getNumber() { return value; } - - public static Status valueOf(int value) { - switch (value) { - case 0: return SUCCESS; - case 1: return ERROR; - case 2: return FATAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Status findValueByNumber(int number) { - return Status.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor().getEnumTypes().get(0); - } - - private static final Status[] VALUES = { - SUCCESS, ERROR, FATAL, - }; - - public static Status valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private Status(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:RpcResponseHeader.Status) - } - - private int bitField0_; - // required uint32 callId = 1; - public static final int CALLID_FIELD_NUMBER = 1; - private int callId_; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - - // required .RpcResponseHeader.Status status = 2; - public static final int STATUS_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status getStatus() { - return status_; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseMeta_fieldAccessorTable; } private void initFields() { - callId_ = 0; - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasCallId()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasStatus()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -2374,12 +1744,6 @@ public final class RPCProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt32(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, status_.getNumber()); - } getUnknownFields().writeTo(output); } @@ -2389,14 +1753,6 @@ public final class RPCProtos { if (size != -1) return size; size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, callId_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, status_.getNumber()); - } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -2414,22 +1770,12 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta) obj; boolean result = true; - result = result && (hasCallId() == other.hasCallId()); - if (hasCallId()) { - result = result && (getCallId() - == other.getCallId()); - } - result = result && (hasStatus() == other.hasStatus()); - if (hasStatus()) { - result = result && - (getStatus() == other.getStatus()); - } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -2439,53 +1785,45 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCallId()) { - hash = (37 * hash) + CALLID_FIELD_NUMBER; - hash = (53 * hash) + getCallId(); - } - if (hasStatus()) { - hash = (37 * hash) + STATUS_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getStatus()); - } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2494,7 +1832,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2505,12 +1843,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2520,7 +1858,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader prototype) { + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2533,18 +1871,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeaderOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseMeta_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseHeader_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseMeta_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2563,10 +1901,6 @@ public final class RPCProtos { public Builder clear() { super.clear(); - callId_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -2576,24 +1910,24 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -2601,53 +1935,28 @@ public final class RPCProtos { return result; } - public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.callId_ = callId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.status_ = status_; - result.bitField0_ = to_bitField0_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta(this); onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.getDefaultInstance()) return this; - if (other.hasCallId()) { - setCallId(other.getCallId()); - } - if (other.hasStatus()) { - setStatus(other.getStatus()); - } + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasCallId()) { - - return false; - } - if (!hasStatus()) { - - return false; - } return true; } @@ -2674,134 +1983,198 @@ public final class RPCProtos { } break; } - case 8: { - bitField0_ |= 0x00000001; - callId_ = input.readUInt32(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - status_ = value; - } - break; - } } } } - private int bitField0_; - - // required uint32 callId = 1; - private int callId_ ; - public boolean hasCallId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public int getCallId() { - return callId_; - } - public Builder setCallId(int value) { - bitField0_ |= 0x00000001; - callId_ = value; - onChanged(); - return this; - } - public Builder clearCallId() { - bitField0_ = (bitField0_ & ~0x00000001); - callId_ = 0; - onChanged(); - return this; - } - - // required .RpcResponseHeader.Status status = 2; - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - public boolean hasStatus() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status 
getStatus() { - return status_; - } - public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - status_ = value; - onChanged(); - return this; - } - public Builder clearStatus() { - bitField0_ = (bitField0_ & ~0x00000002); - status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status.SUCCESS; - onChanged(); - return this; - } - // @@protoc_insertion_point(builder_scope:RpcResponseHeader) + // @@protoc_insertion_point(builder_scope:ResponseMeta) } static { - defaultInstance = new RpcResponseHeader(true); + defaultInstance = new ResponseMeta(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcResponseHeader) + // @@protoc_insertion_point(class_scope:ResponseMeta) } - public interface RpcResponseBodyOrBuilder + public interface ExceptionResponseOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional bytes response = 1; - boolean hasResponse(); - com.google.protobuf.ByteString getResponse(); + // optional string exceptionClassName = 1; + boolean hasExceptionClassName(); + String getExceptionClassName(); + + // optional string stackTrace = 2; + boolean hasStackTrace(); + String getStackTrace(); + + // optional string hostname = 3; + boolean hasHostname(); + String getHostname(); + + // optional int32 port = 4; + boolean hasPort(); + int getPort(); + + // optional bool doNotRetry = 5; + boolean hasDoNotRetry(); + boolean getDoNotRetry(); } - public static final class RpcResponseBody extends + public static final class ExceptionResponse extends com.google.protobuf.GeneratedMessage - implements RpcResponseBodyOrBuilder { - // Use RpcResponseBody.newBuilder() to construct. - private RpcResponseBody(Builder builder) { + implements ExceptionResponseOrBuilder { + // Use ExceptionResponse.newBuilder() to construct. 
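// A minimal caller-side sketch of the ExceptionResponse message defined
// below (a reviewer sketch, not generated code). Every setter and parse
// method used here is declared in this class; the literal values are
// hypothetical:
//
//   RPCProtos.ExceptionResponse er = RPCProtos.ExceptionResponse.newBuilder()
//       .setExceptionClassName("org.apache.hadoop.hbase.NotServingRegionException")
//       .setStackTrace("<stack trace text>")   // optional string, field 2
//       .setHostname("rs1.example.com")        // optional string, field 3
//       .setPort(60020)                        // optional int32, field 4
//       .setDoNotRetry(true)                   // optional bool, field 5
//       .build();
//   // Round-trip: all five fields are optional, so isInitialized() passes
//   // even for a partially populated message.
//   RPCProtos.ExceptionResponse parsed =
//       RPCProtos.ExceptionResponse.parseFrom(er.toByteArray());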
+ private ExceptionResponse(Builder builder) { super(builder); } - private RpcResponseBody(boolean noInit) {} + private ExceptionResponse(boolean noInit) {} - private static final RpcResponseBody defaultInstance; - public static RpcResponseBody getDefaultInstance() { + private static final ExceptionResponse defaultInstance; + public static ExceptionResponse getDefaultInstance() { return defaultInstance; } - public RpcResponseBody getDefaultInstanceForType() { + public ExceptionResponse getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable; } private int bitField0_; - // optional bytes response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString response_; - public boolean hasResponse() { + // optional string exceptionClassName = 1; + public static final int EXCEPTIONCLASSNAME_FIELD_NUMBER = 1; + private java.lang.Object exceptionClassName_; + public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public com.google.protobuf.ByteString getResponse() { - return response_; - } - - private void initFields() { - response_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; + public String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + exceptionClassName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getExceptionClassNameBytes() { + java.lang.Object ref = exceptionClassName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + exceptionClassName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string stackTrace = 2; + public static final int STACKTRACE_FIELD_NUMBER = 2; + private java.lang.Object stackTrace_; + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + stackTrace_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getStackTraceBytes() { + java.lang.Object ref = stackTrace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + stackTrace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string hostname = 3; + public static final int HOSTNAME_FIELD_NUMBER = 3; + private 
java.lang.Object hostname_; + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getHostname() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + hostname_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional int32 port = 4; + public static final int PORT_FIELD_NUMBER = 4; + private int port_; + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getPort() { + return port_; + } + + // optional bool doNotRetry = 5; + public static final int DONOTRETRY_FIELD_NUMBER = 5; + private boolean doNotRetry_; + public boolean hasDoNotRetry() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getDoNotRetry() { + return doNotRetry_; + } + + private void initFields() { + exceptionClassName_ = ""; + stackTrace_ = ""; + hostname_ = ""; + port_ = 0; + doNotRetry_ = false; + } + private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; @@ -2814,7 +2187,19 @@ public final class RPCProtos { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, response_); + output.writeBytes(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeInt32(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, doNotRetry_); } getUnknownFields().writeTo(output); } @@ -2827,7 +2212,23 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, response_); + .computeBytesSize(1, getExceptionClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getStackTraceBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getHostnameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(4, port_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, doNotRetry_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2846,16 +2247,36 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other = 
(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) obj; boolean result = true; - result = result && (hasResponse() == other.hasResponse()); - if (hasResponse()) { - result = result && getResponse() - .equals(other.getResponse()); + result = result && (hasExceptionClassName() == other.hasExceptionClassName()); + if (hasExceptionClassName()) { + result = result && getExceptionClassName() + .equals(other.getExceptionClassName()); + } + result = result && (hasStackTrace() == other.hasStackTrace()); + if (hasStackTrace()) { + result = result && getStackTrace() + .equals(other.getStackTrace()); + } + result = result && (hasHostname() == other.hasHostname()); + if (hasHostname()) { + result = result && getHostname() + .equals(other.getHostname()); + } + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasDoNotRetry() == other.hasDoNotRetry()); + if (hasDoNotRetry()) { + result = result && (getDoNotRetry() + == other.getDoNotRetry()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -2866,49 +2287,65 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResponse()) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponse().hashCode(); + if (hasExceptionClassName()) { + hash = (37 * hash) + EXCEPTIONCLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getExceptionClassName().hashCode(); + } + if (hasStackTrace()) { + hash = (37 * hash) + STACKTRACE_FIELD_NUMBER; + hash = (53 * hash) + getStackTrace().hashCode(); + } + if (hasHostname()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostname().hashCode(); + } + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasDoNotRetry()) { + hash = (37 * hash) + DONOTRETRY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getDoNotRetry()); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( 
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -2917,7 +2354,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2928,12 +2365,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -2943,7 +2380,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -2956,18 +2393,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBodyOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_descriptor; } 
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponseBody_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ExceptionResponse_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -2986,8 +2423,16 @@ public final class RPCProtos { public Builder clear() { super.clear(); - response_ = com.google.protobuf.ByteString.EMPTY; + exceptionClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); + stackTrace_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + hostname_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + doNotRetry_ = false; + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -2997,24 +2442,24 @@ public final class RPCProtos { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3022,32 +2467,60 @@ public final class RPCProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 
to_bitField0_ |= 0x00000001; } - result.response_ = response_; + result.exceptionClassName_ = exceptionClassName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.stackTrace_ = stackTrace_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.hostname_ = hostname_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.doNotRetry_ = doNotRetry_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody)other); + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.getDefaultInstance()) return this; - if (other.hasResponse()) { - setResponse(other.getResponse()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) return this; + if (other.hasExceptionClassName()) { + setExceptionClassName(other.getExceptionClassName()); + } + if (other.hasStackTrace()) { + setStackTrace(other.getStackTrace()); + } + if (other.hasHostname()) { + setHostname(other.getHostname()); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasDoNotRetry()) { + setDoNotRetry(other.getDoNotRetry()); } this.mergeUnknownFields(other.getUnknownFields()); return this; @@ -3082,7 +2555,27 @@ public final class RPCProtos { } case 10: { bitField0_ |= 0x00000001; - response_ = input.readBytes(); + exceptionClassName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + stackTrace_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + hostname_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + port_ = input.readInt32(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + doNotRetry_ = input.readBool(); break; } } @@ -3091,121 +2584,253 @@ public final class RPCProtos { private int bitField0_; - // optional bytes response = 1; - private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasResponse() { + // optional string exceptionClassName = 1; + private java.lang.Object exceptionClassName_ = ""; + public boolean hasExceptionClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public com.google.protobuf.ByteString getResponse() { - return response_; + public String getExceptionClassName() { + java.lang.Object ref = exceptionClassName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + exceptionClassName_ = s; + return s; + } else { + return (String) ref; + } } - public Builder setResponse(com.google.protobuf.ByteString value) { + public Builder setExceptionClassName(String value) { if 
(value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - response_ = value; + exceptionClassName_ = value; onChanged(); return this; } - public Builder clearResponse() { + public Builder clearExceptionClassName() { bitField0_ = (bitField0_ & ~0x00000001); - response_ = getDefaultInstance().getResponse(); + exceptionClassName_ = getDefaultInstance().getExceptionClassName(); + onChanged(); + return this; + } + void setExceptionClassName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + exceptionClassName_ = value; + onChanged(); + } + + // optional string stackTrace = 2; + private java.lang.Object stackTrace_ = ""; + public boolean hasStackTrace() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getStackTrace() { + java.lang.Object ref = stackTrace_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + stackTrace_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setStackTrace(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + return this; + } + public Builder clearStackTrace() { + bitField0_ = (bitField0_ & ~0x00000002); + stackTrace_ = getDefaultInstance().getStackTrace(); + onChanged(); + return this; + } + void setStackTrace(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + stackTrace_ = value; + onChanged(); + } + + // optional string hostname = 3; + private java.lang.Object hostname_ = ""; + public boolean hasHostname() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getHostname() { + java.lang.Object ref = hostname_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + hostname_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setHostname(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + return this; + } + public Builder clearHostname() { + bitField0_ = (bitField0_ & ~0x00000004); + hostname_ = getDefaultInstance().getHostname(); + onChanged(); + return this; + } + void setHostname(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + hostname_ = value; + onChanged(); + } + + // optional int32 port = 4; + private int port_ ; + public boolean hasPort() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getPort() { + return port_; + } + public Builder setPort(int value) { + bitField0_ |= 0x00000008; + port_ = value; + onChanged(); + return this; + } + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000008); + port_ = 0; + onChanged(); + return this; + } + + // optional bool doNotRetry = 5; + private boolean doNotRetry_ ; + public boolean hasDoNotRetry() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getDoNotRetry() { + return doNotRetry_; + } + public Builder setDoNotRetry(boolean value) { + bitField0_ |= 0x00000010; + doNotRetry_ = value; + onChanged(); + return this; + } + public Builder clearDoNotRetry() { + bitField0_ = (bitField0_ & ~0x00000010); + doNotRetry_ = false; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:RpcResponseBody) + // @@protoc_insertion_point(builder_scope:ExceptionResponse) } static { - defaultInstance = new RpcResponseBody(true); + defaultInstance = new ExceptionResponse(true); 
defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcResponseBody) + // @@protoc_insertion_point(class_scope:ExceptionResponse) } - public interface RpcExceptionOrBuilder + public interface RequestHeaderOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required string exceptionName = 1; - boolean hasExceptionName(); - String getExceptionName(); + // optional uint32 callId = 1; + boolean hasCallId(); + int getCallId(); + + // optional .RPCTInfo traceInfo = 2; + boolean hasTraceInfo(); + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo(); + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder(); - // optional string stackTrace = 2; - boolean hasStackTrace(); - String getStackTrace(); + // optional string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // optional .RequestParamMeta requestParamMeta = 4; + boolean hasRequestParamMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta getRequestParamMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder getRequestParamMetaOrBuilder(); + + // optional .EncodedDataBlockMeta encodedDataMeta = 5; + boolean hasEncodedDataMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder(); } - public static final class RpcException extends + public static final class RequestHeader extends com.google.protobuf.GeneratedMessage - implements RpcExceptionOrBuilder { - // Use RpcException.newBuilder() to construct. - private RpcException(Builder builder) { + implements RequestHeaderOrBuilder { + // Use RequestHeader.newBuilder() to construct. 
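// A minimal sketch of populating the RequestHeader that replaces
// RpcRequestHeader (not generated code; values are hypothetical). All
// setters below correspond to the fields declared in
// RequestHeaderOrBuilder above:
//
//   RPCProtos.RequestHeader header = RPCProtos.RequestHeader.newBuilder()
//       .setCallId(42)                        // optional uint32, field 1
//       .setMethodName("multi")               // optional string, field 3
//       .setRequestParamMeta(
//           RPCProtos.RequestParamMeta.getDefaultInstance())     // field 4
//       .setEncodedDataMeta(
//           RPCProtos.EncodedDataBlockMeta.getDefaultInstance()) // field 5
//       .build();
//   header.writeDelimitedTo(out);  // 'out' is an assumed java.io.OutputStream;
//                                  // pairs with RequestHeader.parseDelimitedFrom(InputStream)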
+ private RequestHeader(Builder builder) { super(builder); } - private RpcException(boolean noInit) {} + private RequestHeader(boolean noInit) {} - private static final RpcException defaultInstance; - public static RpcException getDefaultInstance() { + private static final RequestHeader defaultInstance; + public static RequestHeader getDefaultInstance() { return defaultInstance; } - public RpcException getDefaultInstanceForType() { + public RequestHeader getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable; } private int bitField0_; - // required string exceptionName = 1; - public static final int EXCEPTIONNAME_FIELD_NUMBER = 1; - private java.lang.Object exceptionName_; - public boolean hasExceptionName() { + // optional uint32 callId = 1; + public static final int CALLID_FIELD_NUMBER = 1; + private int callId_; + public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getExceptionName() { - java.lang.Object ref = exceptionName_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - exceptionName_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getExceptionNameBytes() { - java.lang.Object ref = exceptionName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - exceptionName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public int getCallId() { + return callId_; } - // optional string stackTrace = 2; - public static final int STACKTRACE_FIELD_NUMBER = 2; - private java.lang.Object stackTrace_; - public boolean hasStackTrace() { + // optional .RPCTInfo traceInfo = 2; + public static final int TRACEINFO_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_; + public boolean hasTraceInfo() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getStackTrace() { - java.lang.Object ref = stackTrace_; + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() { + return traceInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() { + return traceInfo_; + } + + // optional string methodName = 3; + public static final int METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; if (ref instanceof String) { return (String) ref; } else { @@ -3213,36 +2838,954 @@ public final class RPCProtos { (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { - stackTrace_ = s; + 
methodName_ = s; } return s; } } - private com.google.protobuf.ByteString getStackTraceBytes() { - java.lang.Object ref = stackTrace_; + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); - stackTrace_ = b; + methodName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } + // optional .RequestParamMeta requestParamMeta = 4; + public static final int REQUESTPARAMMETA_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta requestParamMeta_; + public boolean hasRequestParamMeta() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta getRequestParamMeta() { + return requestParamMeta_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder getRequestParamMetaOrBuilder() { + return requestParamMeta_; + } + + // optional .EncodedDataBlockMeta encodedDataMeta = 5; + public static final int ENCODEDDATAMETA_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta encodedDataMeta_; + public boolean hasEncodedDataMeta() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta() { + return encodedDataMeta_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder() { + return encodedDataMeta_; + } + + private void initFields() { + callId_ = 0; + traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + methodName_ = ""; + requestParamMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance(); + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, traceInfo_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(4, requestParamMeta_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, encodedDataMeta_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, callId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, traceInfo_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + if 
(((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, requestParamMeta_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, encodedDataMeta_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) obj; + + boolean result = true; + result = result && (hasCallId() == other.hasCallId()); + if (hasCallId()) { + result = result && (getCallId() + == other.getCallId()); + } + result = result && (hasTraceInfo() == other.hasTraceInfo()); + if (hasTraceInfo()) { + result = result && getTraceInfo() + .equals(other.getTraceInfo()); + } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && (hasRequestParamMeta() == other.hasRequestParamMeta()); + if (hasRequestParamMeta()) { + result = result && getRequestParamMeta() + .equals(other.getRequestParamMeta()); + } + result = result && (hasEncodedDataMeta() == other.hasEncodedDataMeta()); + if (hasEncodedDataMeta()) { + result = result && getEncodedDataMeta() + .equals(other.getEncodedDataMeta()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCallId()) { + hash = (37 * hash) + CALLID_FIELD_NUMBER; + hash = (53 * hash) + getCallId(); + } + if (hasTraceInfo()) { + hash = (37 * hash) + TRACEINFO_FIELD_NUMBER; + hash = (53 * hash) + getTraceInfo().hashCode(); + } + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (hasRequestParamMeta()) { + hash = (37 * hash) + REQUESTPARAMMETA_FIELD_NUMBER; + hash = (53 * hash) + getRequestParamMeta().hashCode(); + } + if (hasEncodedDataMeta()) { + hash = (37 * hash) + ENCODEDDATAMETA_FIELD_NUMBER; + hash = (53 * hash) + getEncodedDataMeta().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeaderOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RequestHeader_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getTraceInfoFieldBuilder(); + getRequestParamMetaFieldBuilder(); + getEncodedDataMetaFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + callId_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + if (traceInfoBuilder_ == null) { + traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + } else { + traceInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + if (requestParamMetaBuilder_ == null) { + requestParamMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance(); + } else { + requestParamMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + } else { + encodedDataMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.callId_ = callId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (traceInfoBuilder_ == null) { + result.traceInfo_ = traceInfo_; + } else { + result.traceInfo_ = traceInfoBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (requestParamMetaBuilder_ == null) { + result.requestParamMeta_ = requestParamMeta_; + } else { + result.requestParamMeta_ = requestParamMetaBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (encodedDataMetaBuilder_ 
== null) { + result.encodedDataMeta_ = encodedDataMeta_; + } else { + result.encodedDataMeta_ = encodedDataMetaBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.getDefaultInstance()) return this; + if (other.hasCallId()) { + setCallId(other.getCallId()); + } + if (other.hasTraceInfo()) { + mergeTraceInfo(other.getTraceInfo()); + } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (other.hasRequestParamMeta()) { + mergeRequestParamMeta(other.getRequestParamMeta()); + } + if (other.hasEncodedDataMeta()) { + mergeEncodedDataMeta(other.getEncodedDataMeta()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + callId_ = input.readUInt32(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(); + if (hasTraceInfo()) { + subBuilder.mergeFrom(getTraceInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setTraceInfo(subBuilder.buildPartial()); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.newBuilder(); + if (hasRequestParamMeta()) { + subBuilder.mergeFrom(getRequestParamMeta()); + } + input.readMessage(subBuilder, extensionRegistry); + setRequestParamMeta(subBuilder.buildPartial()); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.newBuilder(); + if (hasEncodedDataMeta()) { + subBuilder.mergeFrom(getEncodedDataMeta()); + } + input.readMessage(subBuilder, extensionRegistry); + setEncodedDataMeta(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // optional uint32 callId = 1; + private int callId_ ; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + public Builder setCallId(int value) { + bitField0_ |= 0x00000001; + callId_ = value; + onChanged(); 
+ return this; + } + public Builder clearCallId() { + bitField0_ = (bitField0_ & ~0x00000001); + callId_ = 0; + onChanged(); + return this; + } + + // optional .RPCTInfo traceInfo = 2; + private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> traceInfoBuilder_; + public boolean hasTraceInfo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getTraceInfo() { + if (traceInfoBuilder_ == null) { + return traceInfo_; + } else { + return traceInfoBuilder_.getMessage(); + } + } + public Builder setTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { + if (traceInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + traceInfo_ = value; + onChanged(); + } else { + traceInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setTraceInfo( + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder builderForValue) { + if (traceInfoBuilder_ == null) { + traceInfo_ = builderForValue.build(); + onChanged(); + } else { + traceInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeTraceInfo(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo value) { + if (traceInfoBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + traceInfo_ != org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) { + traceInfo_ = + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder(traceInfo_).mergeFrom(value).buildPartial(); + } else { + traceInfo_ = value; + } + onChanged(); + } else { + traceInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearTraceInfo() { + if (traceInfoBuilder_ == null) { + traceInfo_ = org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance(); + onChanged(); + } else { + traceInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder getTraceInfoBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTraceInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder getTraceInfoOrBuilder() { + if (traceInfoBuilder_ != null) { + return traceInfoBuilder_.getMessageOrBuilder(); + } else { + return traceInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder> + getTraceInfoFieldBuilder() { + if (traceInfoBuilder_ == null) { + traceInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder>( + traceInfo_, + getParentForChildren(), + isClean()); + traceInfo_ = null; + } + return 
traceInfoBuilder_; + } + + // optional string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // optional .RequestParamMeta requestParamMeta = 4; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta requestParamMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder> requestParamMetaBuilder_; + public boolean hasRequestParamMeta() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta getRequestParamMeta() { + if (requestParamMetaBuilder_ == null) { + return requestParamMeta_; + } else { + return requestParamMetaBuilder_.getMessage(); + } + } + public Builder setRequestParamMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta value) { + if (requestParamMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + requestParamMeta_ = value; + onChanged(); + } else { + requestParamMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder setRequestParamMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder builderForValue) { + if (requestParamMetaBuilder_ == null) { + requestParamMeta_ = builderForValue.build(); + onChanged(); + } else { + requestParamMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder mergeRequestParamMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta value) { + if (requestParamMetaBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + requestParamMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance()) { + requestParamMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.newBuilder(requestParamMeta_).mergeFrom(value).buildPartial(); + } else { + requestParamMeta_ = value; + } + onChanged(); + } else { + requestParamMetaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder clearRequestParamMeta() { + if (requestParamMetaBuilder_ == null) { + requestParamMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.getDefaultInstance(); + onChanged(); + } else { + requestParamMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + 
public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder getRequestParamMetaBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getRequestParamMetaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder getRequestParamMetaOrBuilder() { + if (requestParamMetaBuilder_ != null) { + return requestParamMetaBuilder_.getMessageOrBuilder(); + } else { + return requestParamMeta_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder> + getRequestParamMetaFieldBuilder() { + if (requestParamMetaBuilder_ == null) { + requestParamMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMetaOrBuilder>( + requestParamMeta_, + getParentForChildren(), + isClean()); + requestParamMeta_ = null; + } + return requestParamMetaBuilder_; + } + + // optional .EncodedDataBlockMeta encodedDataMeta = 5; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder> encodedDataMetaBuilder_; + public boolean hasEncodedDataMeta() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta() { + if (encodedDataMetaBuilder_ == null) { + return encodedDataMeta_; + } else { + return encodedDataMetaBuilder_.getMessage(); + } + } + public Builder setEncodedDataMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta value) { + if (encodedDataMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encodedDataMeta_ = value; + onChanged(); + } else { + encodedDataMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setEncodedDataMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder builderForValue) { + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = builderForValue.build(); + onChanged(); + } else { + encodedDataMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeEncodedDataMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta value) { + if (encodedDataMetaBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + encodedDataMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance()) { + encodedDataMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.newBuilder(encodedDataMeta_).mergeFrom(value).buildPartial(); + } else { + encodedDataMeta_ = value; + } + onChanged(); + } else { + encodedDataMetaBuilder_.mergeFrom(value); + } + 
bitField0_ |= 0x00000010; + return this; + } + public Builder clearEncodedDataMeta() { + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + onChanged(); + } else { + encodedDataMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder getEncodedDataMetaBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getEncodedDataMetaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder() { + if (encodedDataMetaBuilder_ != null) { + return encodedDataMetaBuilder_.getMessageOrBuilder(); + } else { + return encodedDataMeta_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder> + getEncodedDataMetaFieldBuilder() { + if (encodedDataMetaBuilder_ == null) { + encodedDataMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder>( + encodedDataMeta_, + getParentForChildren(), + isClean()); + encodedDataMeta_ = null; + } + return encodedDataMetaBuilder_; + } + + // @@protoc_insertion_point(builder_scope:RequestHeader) + } + + static { + defaultInstance = new RequestHeader(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RequestHeader) + } + + public interface ResponseHeaderOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint32 callId = 1; + boolean hasCallId(); + int getCallId(); + + // optional .ExceptionResponse exception = 2; + boolean hasException(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder(); + + // optional .ResponseMeta responseMeta = 3; + boolean hasResponseMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta getResponseMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder getResponseMetaOrBuilder(); + + // optional .EncodedDataBlockMeta encodedDataMeta = 4; + boolean hasEncodedDataMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder(); + } + public static final class ResponseHeader extends + com.google.protobuf.GeneratedMessage + implements ResponseHeaderOrBuilder { + // Use ResponseHeader.newBuilder() to construct. 
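Before the ResponseHeader boilerplate continues, it is worth seeing what the RequestHeader builder machinery above amounts to for calling code. A minimal sketch, assuming hypothetical call values; the only types used are ones this patch generates:

    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
    import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo;

    public class RequestHeaderSketch {
      public static void main(String[] args) {
        RequestHeader header = RequestHeader.newBuilder()
            .setCallId(1)                                 // hypothetical call id, echoed back in ResponseHeader.callId
            .setMethodName("get")                         // hypothetical method name the server dispatches on
            .setTraceInfo(RPCTInfo.getDefaultInstance())  // routed through the SingleFieldBuilder path above
            .build();
        // Each has-method reflects a bitField0_ flag the builder maintained.
        assert header.hasCallId() && header.hasMethodName() && header.hasTraceInfo();
      }
    }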
+ private ResponseHeader(Builder builder) { + super(builder); + } + private ResponseHeader(boolean noInit) {} + + private static final ResponseHeader defaultInstance; + public static ResponseHeader getDefaultInstance() { + return defaultInstance; + } + + public ResponseHeader getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable; + } + + private int bitField0_; + // optional uint32 callId = 1; + public static final int CALLID_FIELD_NUMBER = 1; + private int callId_; + public boolean hasCallId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getCallId() { + return callId_; + } + + // optional .ExceptionResponse exception = 2; + public static final int EXCEPTION_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_; + public boolean hasException() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { + return exception_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { + return exception_; + } + + // optional .ResponseMeta responseMeta = 3; + public static final int RESPONSEMETA_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta responseMeta_; + public boolean hasResponseMeta() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta getResponseMeta() { + return responseMeta_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder getResponseMetaOrBuilder() { + return responseMeta_; + } + + // optional .EncodedDataBlockMeta encodedDataMeta = 4; + public static final int ENCODEDDATAMETA_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta encodedDataMeta_; + public boolean hasEncodedDataMeta() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta() { + return encodedDataMeta_; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder() { + return encodedDataMeta_; + } + private void initFields() { - exceptionName_ = ""; - stackTrace_ = ""; + callId_ = 0; + exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + responseMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance(); + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasExceptionName()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -3251,10 +3794,16 @@ public final class RPCProtos { throws java.io.IOException { 
getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getExceptionNameBytes()); + output.writeUInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getStackTraceBytes()); + output.writeMessage(2, exception_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, responseMeta_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(4, encodedDataMeta_); } getUnknownFields().writeTo(output); } @@ -3267,11 +3816,19 @@ public final class RPCProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getExceptionNameBytes()); + .computeUInt32Size(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getStackTraceBytes()); + .computeMessageSize(2, exception_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, responseMeta_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, encodedDataMeta_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -3290,21 +3847,31 @@ public final class RPCProtos { if (obj == this) { return true; } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)) { + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader)) { return super.equals(obj); } - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) obj; + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other = (org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) obj; boolean result = true; - result = result && (hasExceptionName() == other.hasExceptionName()); - if (hasExceptionName()) { - result = result && getExceptionName() - .equals(other.getExceptionName()); + result = result && (hasCallId() == other.hasCallId()); + if (hasCallId()) { + result = result && (getCallId() + == other.getCallId()); } - result = result && (hasStackTrace() == other.hasStackTrace()); - if (hasStackTrace()) { - result = result && getStackTrace() - .equals(other.getStackTrace()); + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && (hasResponseMeta() == other.hasResponseMeta()); + if (hasResponseMeta()) { + result = result && getResponseMeta() + .equals(other.getResponseMeta()); + } + result = result && (hasEncodedDataMeta() == other.hasEncodedDataMeta()); + if (hasEncodedDataMeta()) { + result = result && getEncodedDataMeta() + .equals(other.getEncodedDataMeta()); } result = result && getUnknownFields().equals(other.getUnknownFields()); @@ -3315,53 +3882,61 @@ public final class RPCProtos { public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasExceptionName()) { - hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getExceptionName().hashCode(); + if (hasCallId()) { + hash = (37 * hash) + CALLID_FIELD_NUMBER; + hash = (53 * hash) + getCallId(); } - if (hasStackTrace()) { - hash = (37 * hash) + STACKTRACE_FIELD_NUMBER; - hash = (53 * hash) + getStackTrace().hashCode(); + if (hasException()) { 
+ hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + if (hasResponseMeta()) { + hash = (37 * hash) + RESPONSEMETA_FIELD_NUMBER; + hash = (53 * hash) + getResponseMeta().hashCode(); + } + if (hasEncodedDataMeta()) { + hash = (37 * hash) + ENCODEDDATAMETA_FIELD_NUMBER; + hash = (53 * hash) + getEncodedDataMeta().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(byte[] data) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -3370,7 +3945,7 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseDelimitedFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3381,12 +3956,12 @@ public final class RPCProtos { return null; } } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public 
static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException parseFrom( + public static org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -3396,7 +3971,7 @@ public final class RPCProtos { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException prototype) { + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -3409,18 +3984,18 @@ public final class RPCProtos { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcExceptionOrBuilder { + implements org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeaderOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_descriptor; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcException_fieldAccessorTable; + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_ResponseHeader_fieldAccessorTable; } - // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.newBuilder() + // Construct using org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -3431,6 +4006,9 @@ public final class RPCProtos { } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getExceptionFieldBuilder(); + getResponseMetaFieldBuilder(); + getEncodedDataMetaFieldBuilder(); } } private static Builder create() { @@ -3439,10 +4017,26 @@ public final class RPCProtos { public Builder clear() { super.clear(); - exceptionName_ = ""; + callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); - stackTrace_ = ""; + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000002); + if (responseMetaBuilder_ == null) { + responseMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance(); + } else { + responseMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + } else { + encodedDataMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); return this; } @@ -3452,24 +4046,24 @@ public final class RPCProtos { public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDescriptor(); + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDescriptor(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance(); } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException build() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader build() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildParsed() + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); @@ -3477,49 +4071,71 @@ public final class RPCProtos { return result; } - public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException(this); + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader result = new org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.exceptionName_ = exceptionName_; + result.callId_ = callId_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } - result.stackTrace_ = stackTrace_; + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (responseMetaBuilder_ == null) { + result.responseMeta_ = responseMeta_; + } else { + result.responseMeta_ = responseMetaBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + if (encodedDataMetaBuilder_ == null) { + result.encodedDataMeta_ = encodedDataMeta_; + } else { + result.encodedDataMeta_ = encodedDataMetaBuilder_.build(); + } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException)other); + if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance()) return this; - if (other.hasExceptionName()) { - setExceptionName(other.getExceptionName()); + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.getDefaultInstance()) return this; + if (other.hasCallId()) { + setCallId(other.getCallId()); } - if (other.hasStackTrace()) { - setStackTrace(other.getStackTrace()); + if (other.hasException()) { + mergeException(other.getException()); + } + if (other.hasResponseMeta()) { + mergeResponseMeta(other.getResponseMeta()); + } + if (other.hasEncodedDataMeta()) { + mergeEncodedDataMeta(other.getEncodedDataMeta()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { - if (!hasExceptionName()) { - - return false; - } return true; } @@ -3546,14 +4162,36 @@ public final class RPCProtos { } break; } - case 10: { + case 8: { bitField0_ |= 0x00000001; - exceptionName_ = input.readBytes(); + callId_ = input.readUInt32(); break; } case 18: { - bitField0_ |= 0x00000002; - stackTrace_ = input.readBytes(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(); + if (hasException()) { + subBuilder.mergeFrom(getException()); + } + input.readMessage(subBuilder, extensionRegistry); + setException(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.newBuilder(); + if (hasResponseMeta()) { + subBuilder.mergeFrom(getResponseMeta()); + } + input.readMessage(subBuilder, extensionRegistry); + setResponseMeta(subBuilder.buildPartial()); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.newBuilder(); + if (hasEncodedDataMeta()) { + subBuilder.mergeFrom(getEncodedDataMeta()); + } + input.readMessage(subBuilder, extensionRegistry); + setEncodedDataMeta(subBuilder.buildPartial()); break; } } @@ -3562,87 +4200,306 @@ public final class RPCProtos { private int bitField0_; - // required string exceptionName = 1; - private java.lang.Object exceptionName_ = ""; - public boolean hasExceptionName() { + // optional uint32 callId = 1; + private int callId_ ; + public boolean hasCallId() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public String getExceptionName() { - java.lang.Object ref = exceptionName_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - exceptionName_ = s; - return s; - } else { - return (String) ref; - } + public int getCallId() { + return callId_; } - public Builder setExceptionName(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - exceptionName_ = value; + public Builder setCallId(int value) { + bitField0_ |= 0x00000001; + 
callId_ = value; onChanged(); return this; } - public Builder clearExceptionName() { + public Builder clearCallId() { bitField0_ = (bitField0_ & ~0x00000001); - exceptionName_ = getDefaultInstance().getExceptionName(); + callId_ = 0; onChanged(); return this; } - void setExceptionName(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000001; - exceptionName_ = value; - onChanged(); - } - // optional string stackTrace = 2; - private java.lang.Object stackTrace_ = ""; - public boolean hasStackTrace() { + // optional .ExceptionResponse exception = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> exceptionBuilder_; + public boolean hasException() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public String getStackTrace() { - java.lang.Object ref = stackTrace_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - stackTrace_ = s; - return s; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse getException() { + if (exceptionBuilder_ == null) { + return exception_; } else { - return (String) ref; + return exceptionBuilder_.getMessage(); } } - public Builder setStackTrace(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - stackTrace_ = value; - onChanged(); + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; return this; } - public Builder clearStackTrace() { + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } bitField0_ = (bitField0_ & ~0x00000002); - stackTrace_ = getDefaultInstance().getStackTrace(); - onChanged(); return this; } - void setStackTrace(com.google.protobuf.ByteString value) { + public 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder getExceptionBuilder() { bitField0_ |= 0x00000002; - stackTrace_ = value; onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponseOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // optional .ResponseMeta responseMeta = 3; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta responseMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder> responseMetaBuilder_; + public boolean hasResponseMeta() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta getResponseMeta() { + if (responseMetaBuilder_ == null) { + return responseMeta_; + } else { + return responseMetaBuilder_.getMessage(); + } + } + public Builder setResponseMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta value) { + if (responseMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + responseMeta_ = value; + onChanged(); + } else { + responseMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setResponseMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder builderForValue) { + if (responseMetaBuilder_ == null) { + responseMeta_ = builderForValue.build(); + onChanged(); + } else { + responseMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeResponseMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta value) { + if (responseMetaBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + responseMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance()) { + responseMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.newBuilder(responseMeta_).mergeFrom(value).buildPartial(); + } else { + responseMeta_ = value; + } + onChanged(); + } else { + responseMetaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearResponseMeta() { + if (responseMetaBuilder_ == null) { + responseMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.getDefaultInstance(); + 
onChanged(); + } else { + responseMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder getResponseMetaBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getResponseMetaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder getResponseMetaOrBuilder() { + if (responseMetaBuilder_ != null) { + return responseMetaBuilder_.getMessageOrBuilder(); + } else { + return responseMeta_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder> + getResponseMetaFieldBuilder() { + if (responseMetaBuilder_ == null) { + responseMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMetaOrBuilder>( + responseMeta_, + getParentForChildren(), + isClean()); + responseMeta_ = null; + } + return responseMetaBuilder_; + } + + // optional .EncodedDataBlockMeta encodedDataMeta = 4; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder> encodedDataMetaBuilder_; + public boolean hasEncodedDataMeta() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta getEncodedDataMeta() { + if (encodedDataMetaBuilder_ == null) { + return encodedDataMeta_; + } else { + return encodedDataMetaBuilder_.getMessage(); + } + } + public Builder setEncodedDataMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta value) { + if (encodedDataMetaBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + encodedDataMeta_ = value; + onChanged(); + } else { + encodedDataMetaBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder setEncodedDataMeta( + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder builderForValue) { + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = builderForValue.build(); + onChanged(); + } else { + encodedDataMetaBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder mergeEncodedDataMeta(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta value) { + if (encodedDataMetaBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008) && + encodedDataMeta_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance()) { + encodedDataMeta_ = + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.newBuilder(encodedDataMeta_).mergeFrom(value).buildPartial(); + } else { + encodedDataMeta_ = value; + } + onChanged(); + } else { + 
encodedDataMetaBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + return this; + } + public Builder clearEncodedDataMeta() { + if (encodedDataMetaBuilder_ == null) { + encodedDataMeta_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.getDefaultInstance(); + onChanged(); + } else { + encodedDataMetaBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder getEncodedDataMetaBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getEncodedDataMetaFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder getEncodedDataMetaOrBuilder() { + if (encodedDataMetaBuilder_ != null) { + return encodedDataMetaBuilder_.getMessageOrBuilder(); + } else { + return encodedDataMeta_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder> + getEncodedDataMetaFieldBuilder() { + if (encodedDataMetaBuilder_ == null) { + encodedDataMetaBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMetaOrBuilder>( + encodedDataMeta_, + getParentForChildren(), + isClean()); + encodedDataMeta_ = null; + } + return encodedDataMetaBuilder_; } - // @@protoc_insertion_point(builder_scope:RpcException) + // @@protoc_insertion_point(builder_scope:ResponseHeader) } static { - defaultInstance = new RpcException(true); + defaultInstance = new ResponseHeader(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:RpcException) + // @@protoc_insertion_point(class_scope:ResponseHeader) } private static com.google.protobuf.Descriptors.Descriptor @@ -3656,30 +4513,35 @@ public final class RPCProtos { com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ConnectionHeader_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcRequestHeader_descriptor; + internal_static_EncodedDataBlockMeta_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcRequestHeader_fieldAccessorTable; + internal_static_EncodedDataBlockMeta_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcRequestBody_descriptor; + internal_static_RequestParamMeta_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcRequestBody_fieldAccessorTable; + internal_static_RequestParamMeta_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcResponseHeader_descriptor; + internal_static_ResponseMeta_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcResponseHeader_fieldAccessorTable; + internal_static_ResponseMeta_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcResponseBody_descriptor; + internal_static_ExceptionResponse_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_RpcResponseBody_fieldAccessorTable; + internal_static_ExceptionResponse_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor - internal_static_RpcException_descriptor; + internal_static_RequestHeader_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RpcException_fieldAccessorTable; + internal_static_RequestHeader_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ResponseHeader_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ResponseHeader_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -3689,22 +4551,26 @@ public final class RPCProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\tRPC.proto\032\rTracing.proto\":\n\017UserInform" + - "ation\022\025\n\reffectiveUser\030\001 \002(\t\022\020\n\010realUser" + - "\030\002 \001(\t\"w\n\020ConnectionHeader\022\"\n\010userInfo\030\001" + - " \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" + - ":-org.apache.hadoop.hbase.client.ClientP" + - "rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " + - "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"O\n\016RpcRequ" + - "estBody\022\022\n\nmethodName\030\001 \002(\t\022\017\n\007request\030\002" + - " \001(\014\022\030\n\020requestClassName\030\004 \001(\t\"{\n\021RpcRes" + - "ponseHeader\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 ", - "\002(\0162\031.RpcResponseHeader.Status\"+\n\006Status" + - "\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017R" + - "pcResponseBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcE" + - "xception\022\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstack" + - "Trace\030\002 \001(\tB<\n*org.apache.hadoop.hbase.p" + - "rotobuf.generatedB\tRPCProtosH\001\240\001\001" + "\n\tRPC.proto\032\rTracing.proto\032\013hbase.proto\"" + + ":\n\017UserInformation\022\025\n\reffectiveUser\030\001 \002(" + + "\t\022\020\n\010realUser\030\002 \001(\t\"w\n\020ConnectionHeader\022" + + "\"\n\010userInfo\030\001 \001(\0132\020.UserInformation\022?\n\010p" + + "rotocol\030\002 \001(\t:-org.apache.hadoop.hbase.c" + + "lient.ClientProtocol\"\026\n\024EncodedDataBlock" + + "Meta\"\022\n\020RequestParamMeta\"\016\n\014ResponseMeta" + + "\"w\n\021ExceptionResponse\022\032\n\022exceptionClassN" + + "ame\030\001 \001(\t\022\022\n\nstackTrace\030\002 \001(\t\022\020\n\010hostnam" + + "e\030\003 \001(\t\022\014\n\004port\030\004 \001(\005\022\022\n\ndoNotRetry\030\005 \001(", + "\010\"\256\001\n\rRequestHeader\022\016\n\006callId\030\001 \001(\r\022\034\n\tt" + + "raceInfo\030\002 \001(\0132\t.RPCTInfo\022\022\n\nmethodName\030" + + "\003 \001(\t\022+\n\020requestParamMeta\030\004 \001(\0132\021.Reques" + + "tParamMeta\022.\n\017encodedDataMeta\030\005 \001(\0132\025.En" + + "codedDataBlockMeta\"\234\001\n\016ResponseHeader\022\016\n" + + "\006callId\030\001 \001(\r\022%\n\texception\030\002 \001(\0132\022.Excep" + + "tionResponse\022#\n\014responseMeta\030\003 \001(\0132\r.Res" + + "ponseMeta\022.\n\017encodedDataMeta\030\004 \001(\0132\025.Enc" + + "odedDataBlockMetaB<\n*org.apache.hadoop.h" + + "base.protobuf.generatedB\tRPCProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -3727,46 +4593,54 
@@ public final class RPCProtos { new java.lang.String[] { "UserInfo", "Protocol", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader.Builder.class); - internal_static_RpcRequestHeader_descriptor = + internal_static_EncodedDataBlockMeta_descriptor = getDescriptor().getMessageTypes().get(2); - internal_static_RpcRequestHeader_fieldAccessorTable = new + internal_static_EncodedDataBlockMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcRequestHeader_descriptor, - new java.lang.String[] { "CallId", "Tinfo", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader.Builder.class); - internal_static_RpcRequestBody_descriptor = + internal_static_EncodedDataBlockMeta_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.EncodedDataBlockMeta.Builder.class); + internal_static_RequestParamMeta_descriptor = getDescriptor().getMessageTypes().get(3); - internal_static_RpcRequestBody_fieldAccessorTable = new + internal_static_RequestParamMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcRequestBody_descriptor, - new java.lang.String[] { "MethodName", "Request", "RequestClassName", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class); - internal_static_RpcResponseHeader_descriptor = + internal_static_RequestParamMeta_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta.Builder.class); + internal_static_ResponseMeta_descriptor = getDescriptor().getMessageTypes().get(4); - internal_static_RpcResponseHeader_fieldAccessorTable = new + internal_static_ResponseMeta_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcResponseHeader_descriptor, - new java.lang.String[] { "CallId", "Status", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Builder.class); - internal_static_RpcResponseBody_descriptor = + internal_static_ResponseMeta_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.class, + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta.Builder.class); + internal_static_ExceptionResponse_descriptor = getDescriptor().getMessageTypes().get(5); - internal_static_RpcResponseBody_fieldAccessorTable = new + internal_static_ExceptionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RpcResponseBody_descriptor, - new java.lang.String[] { "Response", }, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.class, - org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody.Builder.class); - internal_static_RpcException_descriptor = + internal_static_ExceptionResponse_descriptor, + new java.lang.String[] { "ExceptionClassName", "StackTrace", "Hostname", "Port", "DoNotRetry", }, + 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.class,
+          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse.Builder.class);
+      internal_static_RequestHeader_descriptor =
        getDescriptor().getMessageTypes().get(6);
-      internal_static_RpcException_fieldAccessorTable = new
+      internal_static_RequestHeader_fieldAccessorTable = new
+        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+          internal_static_RequestHeader_descriptor,
+          new java.lang.String[] { "CallId", "TraceInfo", "MethodName", "RequestParamMeta", "EncodedDataMeta", },
+          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.class,
+          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader.Builder.class);
+      internal_static_ResponseHeader_descriptor =
+        getDescriptor().getMessageTypes().get(7);
+      internal_static_ResponseHeader_fieldAccessorTable = new
        com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-          internal_static_RpcException_descriptor,
-          new java.lang.String[] { "ExceptionName", "StackTrace", },
-          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.class,
-          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.Builder.class);
+          internal_static_ResponseHeader_descriptor,
+          new java.lang.String[] { "CallId", "Exception", "ResponseMeta", "EncodedDataMeta", },
+          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.class,
+          org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader.Builder.class);
          return null;
        }
      };
@@ -3774,6 +4648,7 @@ public final class RPCProtos {
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.Tracing.getDescriptor(),
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
        }, assigner);
  }
}
diff --git a/hbase-protocol/src/main/protobuf/RPC.proto b/hbase-protocol/src/main/protobuf/RPC.proto
index 90ec7ce..ef8698e 100644
--- a/hbase-protocol/src/main/protobuf/RPC.proto
+++ b/hbase-protocol/src/main/protobuf/RPC.proto
@@ -15,123 +15,122 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-/**
- * Specification of (unsecure) HBase RPC:
- *
- * Client needs to set up a connection first to a server serving a certain
- * HBase protocol (like ClientProtocol). Once the connection is set up, the
- * client and server communicates on that channel for RPC requests/responses.
- * The sections below describe the flow.
- *
- * As part of setting up a connection to a server, the client needs to send
- * the ConnectionHeader header. At the data level, this looks like
- * <"hrpc"-bytearray><'5'[byte]><length-of-ConnectionHeader [int32]><ConnectionHeader [protobuf]>
- *
- * For every RPC that the client makes it needs to send the following
- * RpcRequestHeader and the RpcRequestBody. At the data level this looks like:
- * <RpcRequestHeader [protobuf, length-delimited]>
- * <RpcRequestBody [protobuf]>
- *
- * On a success, the server's protobuf response looks like
- * <RpcResponseHeader [protobuf]><RpcResponseBody [protobuf]>
- * On a failure, the server's protobuf response looks like
- * <RpcResponseHeader [protobuf]>
- * <RpcException [protobuf]>
- *
- * There is one special message that's sent from client to server -
- * the Ping message. At the data level, this is just the bytes corresponding
- * to integer -1.
- */
-
 import "Tracing.proto";
+import "hbase.proto";

 option java_package = "org.apache.hadoop.hbase.protobuf.generated";
 option java_outer_classname = "RPCProtos";
 option java_generate_equals_and_hash = true;
 option optimize_for = SPEED;

+// See https://issues.apache.org/jira/browse/HBASE-7533 for high-level
+// description of RPC specification.
+//
+// On connection setup, the client sends six bytes of preamble -- a four
+// byte magic, a byte of version, and a byte of authentication type.
+//
+// We then send a "ConnectionHeader" protobuf of user information and the
+// 'protocol' or 'service' that is to be run over this connection. This
+// connection header protobuf is prefaced by an int that holds the length
+// of this connection header (this is NOT a varint). The pb connection header
+// is sent with Message#writeTo. The server throws an exception if it doesn't
+// like what it was sent, noting what it is objecting to. Otherwise, the server
+// says nothing and is open for business.
+//
+// Hereafter the client makes requests and the server returns responses.
+//
+// Requests look like this:
+//
+// <RequestHeader Message>
+// <Request Param Message>
+// <EncodedDataBlock(s)>
+//
+// ...where the Request Message is whatever the method name stipulated in
+// the RequestHeader expects; e.g. if the method is a get, then the pb
+// Request Message is a GetRequest; if a scan, a ScanRequest. One
+// or more EncodedDataBlocks will optionally follow. The presence of a
+// Request param Message and/or an EncodedDataBlock is noted in the
+// RequestHeader, including its type.
+//
+// Response is the mirror of the request:
+//
+// <ResponseHeader Message>
+// <Response Message>
+// <EncodedDataBlock(s)>
+//
+// ...where the Response Message is the response type that goes with the
+// method specified when making the request, and the follow-on data blocks may
+// or may not be there -- read the header to find out whether they follow and,
+// if so, their type. If an exception occurred, it is carried inside the
+// ResponseHeader.
+//
+// Any time we write a pb, we do it with Message#writeDelimitedTo EXCEPT when
+// the connection header is sent; this is prefaced by an int with its length
+// and the pb connection header is then written with Message#writeTo.
+//
+// Currently the EncodedDataBlock passing is TODO
+//
 message UserInformation {
   required string effectiveUser = 1;
   optional string realUser = 2;
 }

+// This is sent on connection setup after connection preamble.
 message ConnectionHeader {
-  /** User Info beyond what is established at connection establishment
-   * (applies to secure HBase setup)
-   */
   optional UserInformation userInfo = 1;
-  /** Protocol name for next rpc layer
-   * the client created a proxy with this protocol name
-   */
   optional string protocol = 2 [default = "org.apache.hadoop.hbase.client.ClientProtocol"];
+  // TODO: What DataBlockEncoding we can accept
+  // TODO: What connection compressions we can do; i.e. netty gzip'ing of all comm.
 }
-
-/**
- * The RPC request header
- */
-message RpcRequestHeader {
-  /** Monotonically increasing callId, mostly to keep track of RPCs */
-  required uint32 callId = 1;
-  optional RPCTInfo tinfo = 2;
+// Metadata on optional EncodedDataBlock
+message EncodedDataBlockMeta {
+  // TODO: Metadata on blocks.
 }

-/**
- * The RPC request body
- */
-message RpcRequestBody {
-  /** Name of the RPC method */
-  required string methodName = 1;
-  /** Bytes corresponding to the client protobuf request. This is the actual
-   * bytes corresponding to the RPC request argument.
-   */
-  optional bytes request = 2;
+// Metadata on optional Request Parameter
+message RequestParamMeta {
+}

-  /** Some metainfo about the request. Helps us to treat RPCs with
-   * different priorities. For now this is just the classname of the request
-   * proto object.
-   */
-  optional string requestClassName = 4;
+// Metadata on optional Response (optional because the caller could get an exception instead)
+message ResponseMeta {
+}

-/**
- * The RPC response header
- */
-message RpcResponseHeader {
-  /** Echo back the callId the client sent */
-  required uint32 callId = 1;
-  /** Did the RPC execution encounter an error at the server */
-  enum Status {
-    SUCCESS = 0;
-    ERROR = 1;
-    FATAL = 2;
-  }
-  required Status status = 2;
+// At the RPC layer, this message is used to carry
+// the server side exception to the RPC client.
+message ExceptionResponse {
+  // Class name of the exception thrown from the server
+  optional string exceptionClassName = 1;
+  // Exception stack trace from the server side
+  optional string stackTrace = 2;
+  // Optional hostname. Filled in for some exceptions such as region moved
+  // where the exception gives a clue on where the region may have moved.
+  optional string hostname = 3;
+  optional int32 port = 4;
+  // Set if we are NOT to retry on receipt of this exception
+  optional bool doNotRetry = 5;
 }

-/**
- * The RPC response body
- */
-message RpcResponseBody {
-  /** Optional response bytes. This is the actual bytes corresponding to the
-   * return value of the invoked RPC.
-   */
-  optional bytes response = 1;
+
+// Header sent making a request.
+message RequestHeader {
+  // Monotonically increasing callId to keep track of RPC requests and their responses
+  optional uint32 callId = 1;
+  optional RPCTInfo traceInfo = 2;
+  optional string methodName = 3;
+  // If present, then a pb Message param follows.
+  optional RequestParamMeta requestParamMeta = 4;
+  // If present, then an encoded data block follows.
+  optional EncodedDataBlockMeta encodedDataMeta = 5;
+  // TODO: Have client specify priority
 }

-/**
- * At the RPC layer, this message is used to indicate
- * the server side exception to the RPC client.
- *
- * HBase RPC client throws an exception indicated
- * by exceptionName with the stackTrace.
- */
-message RpcException {
-  /** Class name of the exception thrown from the server */
-  required string exceptionName = 1;
-  /** Exception stack trace from the server side */
-  optional string stackTrace = 2;
+message ResponseHeader {
+  optional uint32 callId = 1;
+  // If present, then the request threw an exception
+  optional ExceptionResponse exception = 2;
+  // If present, then a pb response Message follows.
+  optional ResponseMeta responseMeta = 3;
+  // If present, then an encoded data block follows.
+  optional EncodedDataBlockMeta encodedDataMeta = 4;
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 8a4999a..2a6f291 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.DataOutputBuffer;

 /**
  * Implements the scanner interface for the HBase client.
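Read together, the RPC.proto comments above fully determine the client-side framing. The following is a minimal sketch of that framing, not the patch's implementation: the magic, version, and auth-type byte values are placeholders (the spec text above does not pin them down), while the protobuf calls are exactly the ones the comments name.

    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import com.google.protobuf.Message;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;

    public class RpcFramingSketch {
      /** Connection setup: six-byte preamble, then an int32-length-prefixed ConnectionHeader. */
      static void setupConnection(OutputStream rawOut, ConnectionHeader connHeader) throws IOException {
        DataOutputStream out = new DataOutputStream(rawOut);
        out.write(new byte[] {'h', 'b', 'a', 's'});  // placeholder 4-byte magic
        out.writeByte(0);                            // placeholder version byte
        out.writeByte(0);                            // placeholder auth-type byte
        // Only the ConnectionHeader is prefixed with a plain int32 (NOT a varint)
        // and written with Message#writeTo, per the spec comment.
        out.writeInt(connHeader.getSerializedSize());
        connHeader.writeTo(out);
        out.flush();
      }

      /** Every subsequent pb goes out with Message#writeDelimitedTo. */
      static void sendRequest(OutputStream rawOut, RequestHeader header, Message param) throws IOException {
        header.writeDelimitedTo(rawOut);
        if (header.hasRequestParamMeta()) {
          param.writeDelimitedTo(rawOut);  // a param Message follows only when the header says so
        }
        // Optional EncodedDataBlock(s) would follow here; their format is still TODO above.
      }
    }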
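On the receiving side, the ResponseHeader defined above either carries an ExceptionResponse or announces a follow-on response Message. Below is a sketch of the read path under the same assumptions; rethrowing the server-side exception as a plain IOException is an illustrative choice, not something the patch prescribes.

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse;
    import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;

    public class ResponseReadSketch {
      /** Reads one delimited ResponseHeader and surfaces any server-side exception. */
      static ResponseHeader readResponseHeader(InputStream in) throws IOException {
        ResponseHeader header = ResponseHeader.parseDelimitedFrom(in);  // returns null at end of stream
        if (header != null && header.hasException()) {
          ExceptionResponse ex = header.getException();
          throw new IOException(ex.getExceptionClassName() + "\n" + ex.getStackTrace());
        }
        return header;  // caller next reads the delimited response Message if responseMeta is set
      }
    }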
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 85be8e6..1af0d9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -43,6 +43,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; import com.google.protobuf.ServiceException; +import com.google.protobuf.TextFormat; /** * Retries scanner operations such as create, next, etc. @@ -55,7 +56,7 @@ public class ScannerCallable extends ServerCallable { = "hbase.client.log.scanner.latency.cutoff"; public static final String LOG_SCANNER_ACTIVITY = "hbase.client.log.scanner.activity"; - private static final Log LOG = LogFactory.getLog(ScannerCallable.class); + public static final Log LOG = LogFactory.getLog(ScannerCallable.class); private long scannerId = -1L; private boolean instantiated = false; private boolean closed = false; @@ -136,9 +137,10 @@ public class ScannerCallable extends ServerCallable { this.scannerId = openScanner(); } else { Result [] rrs = null; + ScanRequest request = null; try { incRPCcallsMetrics(); - ScanRequest request = + request = RequestConverter.buildScanRequest(scannerId, caching, false, nextCallSeq); ScanResponse response = null; try { @@ -175,8 +177,7 @@ public class ScannerCallable extends ServerCallable { updateResultsMetrics(response); } catch (IOException e) { if (logScannerActivity) { - LOG.info("Got exception in fetching from scanner=" - + scannerId, e); + LOG.info("Got exception making request " + TextFormat.shortDebugString(request), e); } IOException ioe = e; if (e instanceof RemoteException) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java index ed44a6d..e7fe436 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java @@ -23,7 +23,6 @@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; -import java.io.EOFException; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; @@ -56,14 +55,11 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IpcProtocol; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseBody; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestParamMeta; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader; import 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo; import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; @@ -89,18 +85,16 @@ import org.apache.hadoop.security.token.TokenSelector; import org.cloudera.htrace.Span; import org.cloudera.htrace.Trace; -import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; import com.google.protobuf.Message.Builder; +import com.google.protobuf.TextFormat; -/** A client for an IPC service. IPC calls take a single Protobuf message as a - * parameter, and return a single Protobuf message as their value. A service runs on +/** + * A client for an IPC service. IPC calls take a single Protobuf message as a + * request and return a single Protobuf message as the result. A service runs on * a port and is defined by a parameter class and a value class. * - * <p>This is the org.apache.hadoop.ipc.Client renamed as HBaseClient and - * moved into this package so can access package-private methods. - * * @see HBaseServer */ @InterfaceAudience.Private @@ -138,6 +132,12 @@ public class HBaseClient { public final static int FAILED_SERVER_EXPIRY_DEFAULT = 2000; /** + * Set into the request if we are passing a request param message. Currently has no + * content so can just reuse a static Message instance. + */ + static final RequestParamMeta REQUEST_PARAM_META = RequestParamMeta.newBuilder().build(); + + /** * A class to manage a list of servers that failed recently. */ static class FailedServers { @@ -186,12 +186,10 @@ public class HBaseClient { return false; } - } + @SuppressWarnings("serial") public static class FailedServerException extends IOException { - private static final long serialVersionUID = -4744376109431464127L; - public FailedServerException(String s) { super(s); } @@ -203,6 +201,8 @@ public class HBaseClient { * @param conf Configuration * @param pingInterval the ping interval */ + // Any reason we couldn't just do tcp keepalive instead of this pingery? + // St.Ack 20130121 public static void setPingInterval(Configuration conf, int pingInterval) { conf.setInt(PING_INTERVAL_NAME, pingInterval); } @@ -237,20 +237,28 @@ public class HBaseClient { /** A call waiting for a value. */ protected class Call { final int id; // call id - final RpcRequestBody param; // rpc request object - Message value; // value, null if error + final Message param; // rpc request method param object + Message response; // value, null if error IOException error; // exception, null if value boolean done; // true when call is done long startTime; + final Method method; - protected Call(RpcRequestBody param) { + protected Call(final Method method, Message param) { this.param = param; + this.method = method; this.startTime = System.currentTimeMillis(); synchronized (HBaseClient.this) { this.id = counter++; } } + @Override + public String toString() { + return "callId: " + this.id + " methodName: " + this.method.getName() + " param {" + + (this.param != null? TextFormat.shortDebugString(this.param): "") + "}"; + } + /** Indicate when the call is complete and the * value or error are available. Notifies by default. */ protected synchronized void callComplete() { @@ -271,10 +279,10 @@ public class HBaseClient { /** Set the return value when there is no error. * Notify the caller the call is done. * - * @param value return value of the call. + * @param response return value of the call. */ - public synchronized void setValue(Message value) { - this.value = value; + public synchronized void setResponse(Message response) { + this.response = response; callComplete(); } @@ -370,17 +378,17 @@ public class HBaseClient { authMethod = AuthMethod.KERBEROS; } - if (LOG.isDebugEnabled()) + if (LOG.isDebugEnabled()) { LOG.debug("Use " + authMethod + " authentication for protocol " + protocol.getSimpleName()); - + } reloginMaxBackoff = conf.getInt("hbase.security.relogin.maxbackoff", 5000); this.remoteId = remoteId; ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); builder.setProtocol(protocol == null ?
"" : protocol.getName()); UserInformation userInfoPB; - if ((userInfoPB = getUserInfoPB(ticket)) != null) { + if ((userInfoPB = getUserInfo(ticket)) != null) { builder.setUserInfo(userInfoPB); } this.header = builder.build(); @@ -392,7 +400,7 @@ public class HBaseClient { this.setDaemon(true); } - private UserInformation getUserInfoPB(UserGroupInformation ugi) { + private UserInformation getUserInfo(UserGroupInformation ugi) { if (ugi == null || authMethod == AuthMethod.DIGEST) { // Don't send user for token auth return null; @@ -615,6 +623,7 @@ public class HBaseClient { * since last I/O activity is equal to or greater than the ping interval */ protected synchronized void sendPing() throws IOException { + // Can we do tcp keepalive instead of this pinging? long curTime = System.currentTimeMillis(); if ( curTime - lastActivity.get() >= pingInterval) { lastActivity.set(curTime); @@ -629,12 +638,11 @@ public class HBaseClient { @Override public void run() { if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": starting, having connections " - + connections.size()); + LOG.debug("Starting, having connections " + connections.size()); try { while (waitForWork()) {//wait here for work - read or close connection - receiveResponse(); + readResponse(); } } catch (Throwable t) { LOG.warn("Unexpected exception receiving call responses", t); @@ -644,8 +652,7 @@ public class HBaseClient { close(); if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": stopped, remaining connections " - + connections.size()); + LOG.debug("Stopped, remaining connections " + connections.size()); } private synchronized void disposeSasl() { @@ -777,7 +784,8 @@ public class HBaseClient { setupConnection(); InputStream inStream = NetUtils.getInputStream(socket); OutputStream outStream = NetUtils.getOutputStream(socket); - writeRpcHeader(outStream); + // Write out the preamble -- MAGIC, version, and auth to use. + writeConnectionHeaderPreamble(outStream); if (useSasl) { final InputStream in2 = inStream; final OutputStream out2 = outStream; @@ -814,11 +822,10 @@ public class HBaseClient { useSasl = false; } } - this.in = new DataInputStream(new BufferedInputStream - (new PingInputStream(inStream))); - this.out = new DataOutputStream - (new BufferedOutputStream(outStream)); - writeHeader(); + this.in = new DataInputStream(new BufferedInputStream(new PingInputStream(inStream))); + this.out = new DataOutputStream(new BufferedOutputStream(outStream)); + // Now write out the connection header + writeConnectionHeader(); // update last activity time touch(); @@ -842,24 +849,23 @@ public class HBaseClient { } } - /* Write the RPC header */ - private void writeRpcHeader(OutputStream outStream) throws IOException { - DataOutputStream out = new DataOutputStream(new BufferedOutputStream(outStream)); - // Write out the header, version and authentication method - out.write(HBaseServer.HEADER.array()); - out.write(HBaseServer.CURRENT_VERSION); - authMethod.write(out); - out.flush(); + /** + * Write the RPC header: + */ + private void writeConnectionHeaderPreamble(OutputStream outStream) throws IOException { + outStream.write(HBaseServer.HEADER.array()); + outStream.write(HBaseServer.CURRENT_VERSION); + outStream.write(authMethod.code); // This will write out a single byte. + outStream.flush(); } /** - * Write the protocol header for each connection + * Write the connection header. * Out is not synchronized because only the first thread does this. 
*/ - private void writeHeader() throws IOException { - // Write out the ConnectionHeader - out.writeInt(header.getSerializedSize()); - header.writeTo(out); + private void writeConnectionHeader() throws IOException { + this.out.writeInt(this.header.getSerializedSize()); + this.header.writeTo(this.out); } /** Close the connection. */ @@ -904,42 +910,39 @@ public class HBaseClient { cleanupCalls(); } if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": closed"); + LOG.debug("Closed"); } - /* Initiates a call by sending the parameter to the remote server. + /** + * Initiates a call by sending the parameter to the remote server. * Note: this is not called from the Connection thread, but by other * threads. + * @param call + * @see #readResponse() */ - protected void sendParam(Call call) { - if (shouldCloseConnection.get()) { - return; - } + protected void writeRequest(Call call) { + if (shouldCloseConnection.get()) return; + if (LOG.isDebugEnabled()) LOG.debug("Sending " + call); try { - if (LOG.isDebugEnabled()) - LOG.debug(getName() + " sending #" + call.id); - - RpcRequestHeader.Builder headerBuilder = RPCProtos.RpcRequestHeader.newBuilder(); - headerBuilder.setCallId(call.id); - + RequestHeader.Builder builder = RequestHeader.newBuilder(); + builder.setCallId(call.id); if (Trace.isTracing()) { Span s = Trace.currentTrace(); - headerBuilder.setTinfo(RPCTInfo.newBuilder() - .setParentId(s.getSpanId()) - .setTraceId(s.getTraceId())); + builder.setTraceInfo(RPCTInfo.newBuilder(). + setParentId(s.getSpanId()).setTraceId(s.getTraceId())); } - + builder.setMethodName(call.method.getName()); + builder.setRequestParamMeta(REQUEST_PARAM_META); + RequestHeader requestHeader = builder.build(); //noinspection SynchronizeOnNonFinalField synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC - RpcRequestHeader header = headerBuilder.build(); - int serializedHeaderSize = header.getSerializedSize(); - int requestSerializedSize = call.param.getSerializedSize(); - this.out.writeInt(serializedHeaderSize + - CodedOutputStream.computeRawVarint32Size(serializedHeaderSize) + - requestSerializedSize + - CodedOutputStream.computeRawVarint32Size(requestSerializedSize)); - header.writeDelimitedTo(this.out); + // Need to write out our total length first so server can read + // it all in in one fell swoop. + int totalSize = IPCUtil.getTotalSizeWhenWrittenDelimited(requestHeader, call.param); + this.out.writeInt(totalSize); + requestHeader.writeDelimitedTo(this.out); call.param.writeDelimitedTo(this.out); + // TODO: Add in encoded data blocks. this.out.flush(); } } catch(IOException e) { @@ -947,7 +950,6 @@ public class HBaseClient { } } - private Method getMethod(Class protocol, String methodName) { Method method = methodInstances.get(methodName); @@ -968,65 +970,50 @@ public class HBaseClient { /* Receive a response. * Because only one receiver, so no synchronization on in. */ - protected void receiveResponse() { - if (shouldCloseConnection.get()) { - return; - } + protected void readResponse() { + if (shouldCloseConnection.get()) return; touch(); - try { // See HBaseServer.Call.setResponse for where we write out the response. - // It writes the call.id (int), a boolean signifying any error (and if - // so the exception name/trace), and the response bytes - - // Read the call id. - RpcResponseHeader response = RpcResponseHeader.parseDelimitedFrom(in); - if (response == null) { - // When the stream is closed, protobuf doesn't raise an EOFException, - // instead, it returns a null message object. 
- throw new EOFException(); - } - int id = response.getCallId(); - if (LOG.isDebugEnabled()) - LOG.debug(getName() + " got value #" + id); - Call call = calls.get(id); + // Total size of the response. + int totalSize = in.readInt(); - Status status = response.getStatus(); - if (status == Status.SUCCESS) { + // Read the header + byte [] bytes = IPCUtil.getDelimitedMessageBytes(in); + ResponseHeader responseHeader = ResponseHeader.parseFrom(bytes); + int id = responseHeader.getCallId(); + if (LOG.isDebugEnabled()) { + LOG.debug("Got response header " + TextFormat.shortDebugString(responseHeader)); + } + Call call = calls.get(id); + if (responseHeader.hasException()) { + ExceptionResponse exceptionResponse = responseHeader.getException(); + RemoteException re = createRemoteException(exceptionResponse); + if (isFatalConnectionException(exceptionResponse)) { + markClosed(re); + } else { + if (call != null) call.setException(re); + } + } else { Message rpcResponseType; try { - rpcResponseType = ProtobufRpcClientEngine.Invoker.getReturnProtoType( - getMethod(remoteId.getProtocol(), - call.param.getMethodName())); + // TODO: Why pb engine pollution in here in this class? FIX. + rpcResponseType = + ProtobufRpcClientEngine.Invoker.getReturnProtoType( + getMethod(remoteId.getProtocol(), call.method.getName())); } catch (Exception e) { throw new RuntimeException(e); //local exception } Builder builder = rpcResponseType.newBuilderForType(); - builder.mergeDelimitedFrom(in); + bytes = IPCUtil.getDelimitedMessageBytes(in); + builder.mergeFrom(bytes); Message value = builder.build(); // it's possible that this call may have been cleaned up due to a RPC // timeout, so check if it still exists before setting the value. - if (call != null) { - call.setValue(value); - } - calls.remove(id); - } else if (status == Status.ERROR) { - RpcException exceptionResponse = RpcException.parseDelimitedFrom(in); - if (call != null) { - //noinspection ThrowableInstanceNeverThrown - call.setException(new RemoteException( - exceptionResponse.getExceptionName(), - exceptionResponse.getStackTrace())); - calls.remove(id); - } - } else if (status == Status.FATAL) { - RpcException exceptionResponse = RpcException.parseDelimitedFrom(in); - // Close the connection - markClosed(new RemoteException( - exceptionResponse.getExceptionName(), - exceptionResponse.getStackTrace())); + if (call != null) call.setResponse(value); } + if (call != null) calls.remove(id); } catch (IOException e) { if (e instanceof SocketTimeoutException && remoteId.rpcTimeout > 0) { // Clean up open calls but don't treat this as a fatal condition, @@ -1045,6 +1032,30 @@ public class HBaseClient { } } + /** + * @param e + * @return True if the exception is a fatal connection exception. + */ + private boolean isFatalConnectionException(final ExceptionResponse e) { + return e.getExceptionClassName(). + equals(FatalConnectionException.class.getName()); + } + + /** + * @param e + * @return RemoteException made from passed e + */ + private RemoteException createRemoteException(final ExceptionResponse e) { + String innerExceptionClassName = e.getExceptionClassName(); + boolean doNotRetry = e.getDoNotRetry(); + return e.hasHostname()? 
+ // If a hostname then add it to the RemoteWithExtrasException + new RemoteWithExtrasException(innerExceptionClassName, + e.getStackTrace(), e.getHostname(), e.getPort(), doNotRetry): + new RemoteWithExtrasException(innerExceptionClassName, + e.getStackTrace(), doNotRetry); + } + protected synchronized void markClosed(IOException e) { if (shouldCloseConnection.compareAndSet(false, true)) { closeException = e; @@ -1105,53 +1116,13 @@ public class HBaseClient { /** * Client-side call timeout */ + @SuppressWarnings("serial") public static class CallTimeoutException extends IOException { public CallTimeoutException(final String msg) { super(msg); } } - /** Call implementation used for parallel calls. */ - protected class ParallelCall extends Call { - private final ParallelResults results; - protected final int index; - - public ParallelCall(RpcRequestBody param, ParallelResults results, int index) { - super(param); - this.results = results; - this.index = index; - } - - /** Deliver result to result collector. */ - @Override - protected void callComplete() { - results.callComplete(this); - } - } - - /** Result collector for parallel calls. */ - protected static class ParallelResults { - protected final Message[] values; - protected int size; - protected int count; - - public ParallelResults(int size) { - this.values = new RpcResponseBody[size]; - this.size = size; - } - - /* - * Collect a result. - */ - synchronized void callComplete(ParallelCall call) { - // FindBugs IS2_INCONSISTENT_SYNC - values[call.index] = call.value; // store the value - count++; // count it - if (count == size) // if all values are in - notify(); // then notify waiting caller - } - } - /** * Construct an IPC client whose values are of the {@link Message} * class. @@ -1252,36 +1223,17 @@ public class HBaseClient { } /** Make a call, passing param, to the IPC server running at - * address, returning the value. Throws exceptions if there are - * network problems or if the remote code threw an exception. - * @param param RpcRequestBody parameter - * @param address network address - * @return Message - * @throws IOException e - */ - public Message call(RpcRequestBody param, InetSocketAddress address) - throws IOException, InterruptedException { - return call(param, address, null, 0); - } - - public Message call(RpcRequestBody param, InetSocketAddress addr, - User ticket, int rpcTimeout) - throws IOException, InterruptedException { - return call(param, addr, null, ticket, rpcTimeout); - } - - /** Make a call, passing param, to the IPC server running at * address which is servicing the protocol protocol, * with the ticket credentials, returning the value. * Throws exceptions if there are network problems or if the remote code * threw an exception. 
*/ - public Message call(RpcRequestBody param, InetSocketAddress addr, + public Message call(Method method, Message param, InetSocketAddress addr, Class protocol, User ticket, int rpcTimeout) throws InterruptedException, IOException { - Call call = new Call(param); + Call call = new Call(method, param); Connection connection = getConnection(addr, protocol, ticket, rpcTimeout, call); - connection.sendParam(call); // send the parameter + connection.writeRequest(call); // send the parameter boolean interrupted = false; //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (call) { @@ -1307,7 +1259,7 @@ public class HBaseClient { // local exception throw wrapException(addr, call.error); } - return call.value; + return call.response; } } @@ -1342,43 +1294,6 @@ public class HBaseClient { } } - /** Makes a set of calls in parallel. Each parameter is sent to the - * corresponding address. When all values are available, or have timed out - * or errored, the collected results are returned in an array. The array - * contains nulls for calls that timed out or errored. */ - public Message[] call(RpcRequestBody[] params, InetSocketAddress[] addresses, - Class protocol, - User ticket) - throws IOException, InterruptedException { - if (addresses.length == 0) return new RpcResponseBody[0]; - - ParallelResults results = new ParallelResults(params.length); - // TODO this synchronization block doesnt make any sense, we should possibly fix it - //noinspection SynchronizationOnLocalVariableOrMethodParameter - synchronized (results) { - for (int i = 0; i < params.length; i++) { - ParallelCall call = new ParallelCall(params[i], results, i); - try { - Connection connection = - getConnection(addresses[i], protocol, ticket, 0, call); - connection.sendParam(call); // send each parameter - } catch (IOException e) { - // log errors - LOG.info("Calling "+addresses[i]+" caught: " + - e.getMessage(),e); - results.size--; // wait for one fewer result - } - } - while (results.count != results.size) { - try { - results.wait(); // wait for all results - } catch (InterruptedException ignored) {} - } - - return results.values; - } - } - /* Get a connection from the pool, or create a new one and add it to the * pool. Connections to a given host/port are reused. */ protected Connection getConnection(InetSocketAddress addr, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClientRPC.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClientRPC.java index 16451c4..928e48f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClientRPC.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClientRPC.java @@ -20,14 +20,9 @@ package org.apache.hadoop.hbase.ipc; import java.io.IOException; -import java.lang.reflect.Proxy; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.SocketTimeoutException; -import java.util.HashMap; -import java.util.Map; - -import javax.net.SocketFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -36,9 +31,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.client.RetriesExhaustedException; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.util.ReflectionUtils; /** * An RPC implementation. This class provides the client side. 
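A matching sketch of the client read path implemented by readResponse() in HBaseClient above, again assuming plain protobuf 2.x APIs. It uses the generated RPCProtos.ResponseHeader from the new RPC.proto, and the caller supplies a builder for the method's declared return type; this is illustration only, not the patch's code.

import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;

import com.google.protobuf.Message;

// Sketch: total-length int, delimited ResponseHeader, then, when the header
// carries no exception, the delimited response Message.
public class ResponseReadingSketch {

  static Message readResponse(DataInputStream in, Message.Builder responseBuilder)
  throws IOException {
    int totalSize = in.readInt(); // size of everything that follows, in one fell swoop
    ResponseHeader header = ResponseHeader.parseDelimitedFrom(in);
    if (header.hasException()) {
      // The server-side exception travels inside the header, not as a response body.
      throw new IOException(header.getException().getExceptionClassName() +
        "\n" + header.getException().getStackTrace());
    }
    responseBuilder.mergeDelimitedFrom(in); // varint length + response pb
    return responseBuilder.build();
  }
}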
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java index 8b075fa..3e7022b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java @@ -23,9 +23,9 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHO import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; +import java.lang.reflect.Method; import java.net.BindException; import java.net.InetAddress; import java.net.InetSocketAddress; @@ -66,16 +66,17 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IpcProtocol; +import org.apache.hadoop.hbase.RegionMovedException; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ExceptionResponse; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseMeta; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; import org.apache.hadoop.hbase.security.HBaseSaslRpcServer; import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; @@ -84,11 +85,12 @@ import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandle import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.ByteBufferOutputStream; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.ipc.RPC.VersionMismatch; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; @@ -109,7 +111,10 @@ import org.cloudera.htrace.impl.NullSpan; import com.google.common.base.Function; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.protobuf.CodedInputStream; import com.google.protobuf.Message; +import com.google.protobuf.Message.Builder; +import com.google.protobuf.TextFormat; // Uses Writables doing sasl /** A client for an IPC service. 
IPC calls take a single Protobuf message as a @@ -123,13 +128,16 @@ import com.google.protobuf.Message; */ @InterfaceAudience.Private public abstract class HBaseServer implements RpcServer { + public static final Log LOG = LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer"); + protected static final Log TRACELOG = + LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer.trace"); private final boolean authorize; protected boolean isSecurityEnabled; /** * The first four bytes of Hadoop RPC connections */ - public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes()); - public static final byte CURRENT_VERSION = 5; + public static final ByteBuffer HEADER = ByteBuffer.wrap("HBas".getBytes()); + public static final byte CURRENT_VERSION = 0; /** * How many calls/handler are allowed in the queue. @@ -153,11 +161,6 @@ public abstract class HBaseServer implements RpcServer { private AtomicInteger delayedCalls; - public static final Log LOG = - LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer"); - protected static final Log TRACELOG = - LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer.trace"); - private static final String AUTH_FAILED_FOR = "Auth failed for "; private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; private static final Log AUDITLOG = @@ -168,10 +171,17 @@ public abstract class HBaseServer implements RpcServer { protected static final ThreadLocal SERVER = new ThreadLocal(); private volatile boolean started = false; + private static final MethodCache methodCache = new MethodCache(); private static final Map<String, Class<? extends IpcProtocol>> PROTOCOL_CACHE = new ConcurrentHashMap<String, Class<? extends IpcProtocol>>(); + /** + * Set into the response if we have a response message to return. Currently has no + * content so can just reuse a static Message instance. + */ + static final ResponseMeta RESPONSE_META = ResponseMeta.newBuilder().build(); + @SuppressWarnings("unchecked") static Class getProtocolClass( String protocolName, Configuration conf) @@ -309,7 +319,8 @@ public abstract class HBaseServer implements RpcServer { /** A call queued for handling. */ protected class Call implements RpcCallContext { protected int id; // the client's call id - protected RpcRequestBody rpcRequestBody; // the parameter passed + protected Method method; + protected Message param; // the parameter passed protected Connection connection; // connection to client protected long timestamp; // the time received when response is null // the time served when response is not null @@ -322,10 +333,11 @@ public abstract class HBaseServer implements RpcServer { protected boolean isError; protected TraceInfo tinfo; - public Call(int id, RpcRequestBody rpcRequestBody, Connection connection, + public Call(int id, Method method, Message param, Connection connection, Responder responder, long size, TraceInfo tinfo) { this.id = id; - this.rpcRequestBody = rpcRequestBody; + this.method = method; + this.param = param; this.connection = connection; this.timestamp = System.currentTimeMillis(); this.response = null; @@ -338,52 +350,68 @@ public abstract class HBaseServer implements RpcServer { @Override public String toString() { - return rpcRequestBody.toString() + " from " + connection.toString(); + return "callId: " + this.id + " methodName: " + this.method.getName() + " param: " + + (this.param != null?
TextFormat.shortDebugString(this.param): "") + + " from " + connection.toString(); } protected synchronized void setSaslTokenResponse(ByteBuffer response) { this.response = response; } - protected synchronized void setResponse(Object value, Status status, - String errorClass, String error) { - if (this.isError) - return; - if (errorClass != null) { - this.isError = true; - } - - ByteBufferOutputStream buf = null; - if (value != null) { - buf = new ByteBufferOutputStream(((Message)value).getSerializedSize()); - } else { - buf = new ByteBufferOutputStream(BUFFER_INITIAL_SIZE); - } - DataOutputStream out = new DataOutputStream(buf); + protected synchronized void setResponse(Object m, Throwable t, String errorMsg) { + if (this.isError) return; + if (t != null) this.isError = true; + ByteBufferOutputStream bbos = null; try { - RpcResponseHeader.Builder builder = RpcResponseHeader.newBuilder(); + ResponseHeader.Builder headerBuilder = ResponseHeader.newBuilder(); + // Presume it is a pb Message. Could be null. + Message result = (Message)m; // Call id. - builder.setCallId(this.id); - builder.setStatus(status); - builder.build().writeDelimitedTo(out); - if (error != null) { - RpcException.Builder b = RpcException.newBuilder(); - b.setExceptionName(errorClass); - b.setStackTrace(error); - b.build().writeDelimitedTo(out); - } else { - if (value != null) { - ((Message)value).writeDelimitedTo(out); + headerBuilder.setCallId(this.id); + if (t != null) { + ExceptionResponse.Builder exceptionBuilder = ExceptionResponse.newBuilder(); + exceptionBuilder.setExceptionClassName(t.getClass().getName()); + exceptionBuilder.setStackTrace(errorMsg); + exceptionBuilder.setDoNotRetry(t instanceof DoNotRetryIOException); + if (t instanceof RegionMovedException) { + // Special casing for this exception. This is the only one carrying a payload. + // Do this instead of building a generic system for allowing exceptions to carry + // any kind of payload. + RegionMovedException rme = (RegionMovedException)t; + exceptionBuilder.setHostname(rme.getHostname()); + exceptionBuilder.setPort(rme.getPort()); } + // Set the exception as the result of the method invocation. + headerBuilder.setException(exceptionBuilder.build()); + } + if (result != null) { + headerBuilder.setResponseMeta(RESPONSE_META); } + Message header = headerBuilder.build(); + int totalSize = IPCUtil.getTotalSizeWhenWrittenDelimited(header, result); + // Allocate total size plus an int to hold the total size returned. + bbos = new ByteBufferOutputStream(Bytes.SIZEOF_INT + totalSize); + // Write total size int + bbos.write(Bytes.toBytes(totalSize)); + // Write out the pb header. + header.writeDelimitedTo(bbos); + // Write out the pb result if there is one. + if (result != null) result.writeDelimitedTo(bbos); if (connection.useWrap) { - wrapWithSasl(buf); + wrapWithSasl(bbos); } + bbos.close(); + if (LOG.isDebugEnabled()) LOG.debug("header " + TextFormat.shortDebugString(header) + + ", result " + (result != null?
TextFormat.shortDebugString(result): "null")); } catch (IOException e) { LOG.warn("Exception while creating response " + e); } - ByteBuffer bb = buf.getByteBuffer(); - bb.position(0); + ByteBuffer bb = null; + if (bbos != null) { + bb = bbos.getByteBuffer(); + bb.position(0); + } this.response = bb; } @@ -415,8 +443,9 @@ public abstract class HBaseServer implements RpcServer { assert this.delayReturnValue || result == null; this.delayResponse = false; delayedCalls.decrementAndGet(); - if (this.delayReturnValue) - this.setResponse(result, Status.SUCCESS, null, null); + if (this.delayReturnValue) { + this.setResponse(result, null, null); + } this.responder.doRespond(this); } @@ -439,8 +468,7 @@ public abstract class HBaseServer implements RpcServer { @Override public synchronized void endDelayThrowing(Throwable t) throws IOException { - this.setResponse(null, Status.ERROR, t.getClass().toString(), - StringUtils.stringifyException(t)); + this.setResponse(null, t, StringUtils.stringifyException(t)); this.delayResponse = false; this.sendResponseIfReady(); } @@ -542,7 +570,7 @@ public abstract class HBaseServer implements RpcServer { try { readSelector.close(); } catch (IOException ioe) { - LOG.error("Error closing read selector in " + getName(), ioe); + LOG.error("Error closing read selector", ioe); } } } @@ -569,8 +597,7 @@ public abstract class HBaseServer implements RpcServer { } } catch (InterruptedException e) { if (running) { // unexpected -- log it - LOG.info(getName() + " unexpectedly interrupted: " + - StringUtils.stringifyException(e)); + LOG.info("Unexpectedly interrupted: " + StringUtils.stringifyException(e)); } } catch (IOException ex) { LOG.error("Error in Reader", ex); @@ -637,7 +664,7 @@ public abstract class HBaseServer implements RpcServer { } if (c.timedOut(currentTime)) { if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": disconnecting client " + c.getHostAddress()); + LOG.debug("Disconnecting client " + c.getHostAddress()); closeConnection(c); numNuked++; end--; @@ -653,7 +680,7 @@ public abstract class HBaseServer implements RpcServer { @Override public void run() { - LOG.info(getName() + ": starting"); + LOG.info("Starting"); SERVER.set(HBaseServer.this); while (running) { @@ -676,7 +703,7 @@ public abstract class HBaseServer implements RpcServer { } catch (OutOfMemoryError e) { if (errorHandler != null) { if (errorHandler.checkOOME(e)) { - LOG.info(getName() + ": exiting on OOME"); + LOG.info("Exiting on OOME"); closeCurrentConnection(key, e); cleanupConnections(true); return; @@ -695,7 +722,7 @@ public abstract class HBaseServer implements RpcServer { } cleanupConnections(false); } - LOG.info("Stopping " + this.getName()); + LOG.info("Stopping"); synchronized (this) { try { @@ -718,7 +745,7 @@ public abstract class HBaseServer implements RpcServer { Connection c = (Connection)key.attachment(); if (c != null) { if (LOG.isDebugEnabled()) { - LOG.debug(getName() + ": disconnecting client " + c.getHostAddress() + + LOG.debug("Disconnecting client " + c.getHostAddress() + (e != null ? 
" on error " + e.getMessage() : "")); } closeConnection(c); @@ -752,7 +779,7 @@ public abstract class HBaseServer implements RpcServer { numConnections++; } if (LOG.isDebugEnabled()) - LOG.debug("Server connection from " + c.toString() + + LOG.debug("Connection from " + c.toString() + "; # active connections: " + numConnections + "; # queued calls: " + callQueue.size()); } finally { @@ -768,24 +795,23 @@ public abstract class HBaseServer implements RpcServer { return; } c.setLastContact(System.currentTimeMillis()); - try { count = c.readAndProcess(); } catch (InterruptedException ieo) { throw ieo; } catch (Exception e) { - LOG.warn(getName() + ": readAndProcess threw exception " + e + ". Count of bytes read: " + count, e); + LOG.warn("ReadAndProcess threw exception " + e + ". Count of bytes read: " + + count, e); count = -1; //so that the (count < 0) block is executed } if (count < 0) { - if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": disconnecting client " + - c.getHostAddress() + ". Number of active connections: "+ - numConnections); + if (LOG.isDebugEnabled()) { + LOG.debug("Disconnecting client " + c.getHostAddress() + + ". Number of active connections: " + numConnections); + } closeConnection(c); // c = null; - } - else { + } else { c.setLastContact(System.currentTimeMillis()); } } @@ -799,7 +825,7 @@ public abstract class HBaseServer implements RpcServer { try { acceptChannel.socket().close(); } catch (IOException e) { - LOG.info(getName() + ":Exception in closing listener socket. " + e); + LOG.info("Exception in closing listener socket. " + e); } } readPool.shutdownNow(); @@ -827,16 +853,16 @@ public abstract class HBaseServer implements RpcServer { @Override public void run() { - LOG.info(getName() + ": starting"); + LOG.info("Starting"); SERVER.set(HBaseServer.this); try { doRunLoop(); } finally { - LOG.info("Stopping " + this.getName()); + LOG.info("Stopping"); try { writeSelector.close(); } catch (IOException ioe) { - LOG.error("Couldn't close write selector in " + this.getName(), ioe); + LOG.error("Couldn't close write selector", ioe); } } } @@ -857,7 +883,7 @@ public abstract class HBaseServer implements RpcServer { doAsyncWrite(key); } } catch (IOException e) { - LOG.info(getName() + ": doAsyncWrite threw exception " + e); + LOG.info("asyncWrite", e); } } long now = System.currentTimeMillis(); @@ -895,7 +921,7 @@ public abstract class HBaseServer implements RpcServer { } catch (OutOfMemoryError e) { if (errorHandler != null) { if (errorHandler.checkOOME(e)) { - LOG.info(getName() + ": exiting on OOME"); + LOG.info("Exiting on OOME"); return; } } else { @@ -912,7 +938,7 @@ public abstract class HBaseServer implements RpcServer { StringUtils.stringifyException(e)); } } - LOG.info("Stopping " + this.getName()); + LOG.info("Stopped"); } private void doAsyncWrite(SelectionKey key) throws IOException { @@ -982,10 +1008,6 @@ public abstract class HBaseServer implements RpcServer { // call = responseQueue.removeFirst(); SocketChannel channel = call.connection.channel; - if (LOG.isDebugEnabled()) { - LOG.debug(getName() + ": responding to #" + call.id + " from " + - call.connection); - } // // Send as much data as we can in the non-blocking fashion // @@ -1002,8 +1024,7 @@ public abstract class HBaseServer implements RpcServer { done = false; // more calls pending to be sent. 
} if (LOG.isDebugEnabled()) { - LOG.debug(getName() + ": responding to #" + call.id + " from " + - call.connection + " Wrote partial " + numBytes + - " bytes."); + LOG.debug(call.toString() + " partially sent, wrote " + numBytes + " bytes."); } } error = false; // everything went off well } } finally { if (error && call != null) { - LOG.warn(getName()+", call " + call + ": output error"); + LOG.warn(call.toString() + ": output error"); done = true; // error. no more data for this channel. closeConnection(call.connection); } @@ -1092,12 +1111,38 @@ public abstract class HBaseServer implements RpcServer { } } + @SuppressWarnings("serial") + public class CallQueueTooBigException extends IOException { + CallQueueTooBigException() { + super(); + } + } + + private Function<Pair<RequestHeader, Message>, Integer> qosFunction = null; + + /** + * Gets the QOS level for this call. If it is higher than the highPriorityLevel and there + * are priorityHandlers available it will be processed in its own thread set. + * + * @param newFunc + */ + @Override + public void setQosFunction(Function<Pair<RequestHeader, Message>, Integer> newFunc) { + qosFunction = newFunc; + } + + protected int getQosLevel(Pair<RequestHeader, Message> headerAndParam) { + if (qosFunction == null) return 0; + Integer res = qosFunction.apply(headerAndParam); + return res == null? 0: res; + } + /** Reads calls from a connection and queues them for handling. */ public class Connection { - private boolean rpcHeaderRead = false; //if initial signature and - //version are read - private boolean headerRead = false; //if the connection header that - //follows version is read. + // Whether the initial preamble with magic and version has been read. + private boolean connectionPreambleRead = false; + // Whether the connection header has been read. + private boolean connectionHeaderRead = false; protected SocketChannel channel; private ByteBuffer data; private ByteBuffer dataLengthBuffer; @@ -1111,7 +1156,7 @@ public abstract class HBaseServer implements RpcServer { // disconnected, we can say where it used to connect to.
protected String hostAddress; protected int remotePort; - ConnectionHeader header; + ConnectionHeader connectionHeader; Class protocol; protected UserGroupInformation user = null; private AuthMethod authMethod; @@ -1125,16 +1170,17 @@ public abstract class HBaseServer implements RpcServer { private boolean useWrap = false; // Fake 'call' for failed authorization response private static final int AUTHROIZATION_FAILED_CALLID = -1; - private final Call authFailedCall = new Call(AUTHROIZATION_FAILED_CALLID, - null, this, null, 0, null); + private final Call authFailedCall = + new Call(AUTHROIZATION_FAILED_CALLID, null, null, this, null, 0, null); private ByteArrayOutputStream authFailedResponse = new ByteArrayOutputStream(); // Fake 'call' for SASL context setup private static final int SASL_CALLID = -33; - private final Call saslCall = new Call(SASL_CALLID, null, this, null, 0, - null); + private final Call saslCall = + new Call(SASL_CALLID, null, null, this, null, 0, null); public UserGroupInformation attemptingUser = null; // user name before auth + public Connection(SocketChannel channel, long lastContact) { this.channel = channel; this.lastContact = lastContact; @@ -1354,49 +1400,54 @@ public abstract class HBaseServer implements RpcServer { } } + /** + * Read off the wire. + * @return Returns -1 if failure (and caller will close connection) else return how many + * bytes were read and processed. + * @throws IOException + * @throws InterruptedException + */ public int readAndProcess() throws IOException, InterruptedException { while (true) { - /* Read at most one RPC. If the header is not read completely yet - * then iterate until we read first RPC or until there is no data left. - */ + // Try and read in an int. If new connection, the int will hold the 'HBas' HEADER. If it + // does, read in the rest of the connection preamble, the version and the auth method. + // Else it will be length of the data to read (or -1 if a ping). We catch the integer + // length into the 4-byte this.dataLengthBuffer. int count = -1; if (dataLengthBuffer.remaining() > 0) { count = channelRead(channel, dataLengthBuffer); - if (count < 0 || dataLengthBuffer.remaining() > 0) + if (count < 0 || dataLengthBuffer.remaining() > 0) { return count; + } } - - if (!rpcHeaderRead) { - //Every connection is expected to send the header. + // If we have not read the connection setup preamble, look to see if that is on the wire. + if (!connectionPreambleRead) { + this.dataLengthBuffer.flip(); + if (!HEADER.equals(this.dataLengthBuffer)) { + return doBadPreambleHandling("Expected HEADER=" + Bytes.toStringBinary(HEADER.array()) + + " but received HEADER=" + Bytes.toStringBinary(this.dataLengthBuffer.array())); + } + // Read the next two bytes, the version and the auth to use. if (rpcHeaderBuffer == null) { + // Allocate two bytes to hold version and auth type. 
rpcHeaderBuffer = ByteBuffer.allocate(2); } count = channelRead(channel, rpcHeaderBuffer); if (count < 0 || rpcHeaderBuffer.remaining() > 0) { return count; } - int version = rpcHeaderBuffer.get(0); - byte[] method = new byte[] {rpcHeaderBuffer.get(1)}; - authMethod = AuthMethod.read(new DataInputStream( - new ByteArrayInputStream(method))); - dataLengthBuffer.flip(); - if (!HEADER.equals(dataLengthBuffer) || version != CURRENT_VERSION) { - LOG.warn("Incorrect header or version mismatch from " + - hostAddress + ":" + remotePort + - " got version " + version + - " expected version " + CURRENT_VERSION); - setupBadVersionResponse(version); - return -1; + int version = this.rpcHeaderBuffer.get(0); + byte authbyte = this.rpcHeaderBuffer.get(1); + this.authMethod = AuthMethod.valueOf(authbyte); + if (version != CURRENT_VERSION || authMethod == null) { + return doBadPreambleHandling("serverVersion=" + CURRENT_VERSION + + ", clientVersion=" + version + ", authMethod=" + authbyte + + ", authSupported=" + (authMethod != null)); } dataLengthBuffer.clear(); - if (authMethod == null) { - throw new IOException("Unable to read authentication method"); - } if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) { - AccessControlException ae = new AccessControlException( - "Authentication is required"); - setupResponse(authFailedResponse, authFailedCall, Status.FATAL, - ae.getClass().getName(), ae.getMessage()); + AccessControlException ae = new AccessControlException("Authentication is required"); + setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage()); responder.doRespond(authFailedCall); throw ae; } @@ -1412,18 +1463,18 @@ public abstract class HBaseServer implements RpcServer { if (authMethod != AuthMethod.SIMPLE) { useSasl = true; } - rpcHeaderBuffer = null; - rpcHeaderRead = true; + connectionPreambleRead = true; + // Preamble checks out. continue; } - + // We have read a length and we have read the preamble. It is either the connection header + // or it is a request. if (data == null) { - dataLengthBuffer.flip(); - dataLength = dataLengthBuffer.getInt(); - + this.dataLengthBuffer.flip(); + dataLength = this.dataLengthBuffer.getInt(); if (dataLength == HBaseClient.PING_CALL_ID) { - if(!useWrap) { //covers the !useSasl too + if (!useWrap) { //covers the !useSasl too dataLengthBuffer.clear(); return 0; //ping message } @@ -1435,9 +1486,7 @@ public abstract class HBaseServer implements RpcServer { data = ByteBuffer.allocate(dataLength); incRpcCount(); // Increment the rpc count } - count = channelRead(channel, data); - if (data.remaining() == 0) { dataLengthBuffer.clear(); data.flip(); @@ -1446,13 +1495,13 @@ public abstract class HBaseServer implements RpcServer { skipInitialSaslHandshake = false; continue; } - boolean isHeaderRead = headerRead; + boolean isHeaderRead = connectionHeaderRead; if (useSasl) { saslReadAndProcess(data.array()); } else { processOneRpc(data.array()); } - data = null; + this.data = null; if (!isHeaderRead) { continue; } @@ -1461,50 +1510,29 @@ public abstract class HBaseServer implements RpcServer { } } - /** - * Try to set up the response to indicate that the client version - * is incompatible with the server. This can contain special-case - * code to speak enough of past IPC protocols to pass back - * an exception to the caller.
- * @param clientVersion the version the caller is using - * @throws IOException - */ - private void setupBadVersionResponse(int clientVersion) throws IOException { - String errMsg = "Server IPC version " + CURRENT_VERSION + - " cannot communicate with client version " + clientVersion; - ByteArrayOutputStream buffer = new ByteArrayOutputStream(); - - if (clientVersion >= 3) { - // We used to return an id of -1 which caused server to close the - // connection without telling the client what the problem was. Now - // we return 0 which will keep the socket up -- bad clients, unless - // they switch to suit the running server -- will fail later doing - // getProtocolVersion. - Call fakeCall = new Call(0, null, this, responder, 0, null); - // Versions 3 and greater can interpret this exception - // response in the same manner - setupResponse(buffer, fakeCall, Status.FATAL, - VersionMismatch.class.getName(), errMsg); - - responder.doRespond(fakeCall); - } + private int doBadPreambleHandling(final String errMsg) throws IOException { + String msg = errMsg + "; cannot communicate with client at " + hostAddress + ":" + remotePort; + LOG.warn(msg); + Call fakeCall = new Call(-1, null, null, this, responder, -1, null); + setupResponse(null, fakeCall, new FatalConnectionException(msg), msg); + responder.doRespond(fakeCall); + // Returning -1 closes out the connection. + return -1; } - /// Reads the connection header following version - private void processHeader(byte[] buf) throws IOException { - DataInputStream in = - new DataInputStream(new ByteArrayInputStream(buf)); - header = ConnectionHeader.parseFrom(in); + // Reads the connection header following version + private void processConnectionHeader(byte[] buf) throws IOException { + this.connectionHeader = ConnectionHeader.parseFrom(buf); try { - String protocolClassName = header.getProtocol(); + String protocolClassName = connectionHeader.getProtocol(); if (protocolClassName != null) { - protocol = getProtocolClass(header.getProtocol(), conf); + protocol = getProtocolClass(connectionHeader.getProtocol(), conf); } } catch (ClassNotFoundException cnfe) { - throw new IOException("Unknown protocol: " + header.getProtocol()); + throw new IOException("Unknown protocol: " + connectionHeader.getProtocol()); } - UserGroupInformation protocolUser = createUser(header); + UserGroupInformation protocolUser = createUser(connectionHeader); if (!useSasl) { user = protocolUser; if (user != null) { @@ -1578,74 +1606,86 @@ public abstract class HBaseServer implements RpcServer { private void processOneRpc(byte[] buf) throws IOException, InterruptedException { - if (headerRead) { - processData(buf); + if (connectionHeaderRead) { + processRequest(buf); } else { - processHeader(buf); - headerRead = true; + processConnectionHeader(buf); + this.connectionHeaderRead = true; if (!authorizeConnection()) { throw new AccessControlException("Connection from " + this + " for protocol " + connectionHeader.getProtocol() + " is unauthorized for user " + user); } } } - protected void processData(byte[] buf) throws IOException, InterruptedException { - DataInputStream dis = - new DataInputStream(new ByteArrayInputStream(buf)); - RpcRequestHeader request = RpcRequestHeader.parseDelimitedFrom(dis); - - int id = request.getCallId(); + /** + * @param buf Has the request header and the request param and optionally an encoded data block * all in this one array.
+ * @throws IOException + * @throws InterruptedException + */ + protected void processRequest(byte[] buf) throws IOException, InterruptedException { long callSize = buf.length; - + int offset = 0; + // Here we read in the header. We avoid having pb + // do its default 4k allocation for CodedInputStream. We force it to use backing array. + CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length); + int headerSize = cis.readRawVarint32(); + offset = cis.getTotalBytesRead(); + RequestHeader header = + RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build(); + offset += headerSize; + int id = header.getCallId(); if (LOG.isDebugEnabled()) { - LOG.debug(" got call #" + id + ", " + callSize + " bytes"); + LOG.debug(" requestHeader " + TextFormat.shortDebugString(header) + + " callSize: " + callSize + " bytes"); } // Enforcing the call queue size, this triggers a retry in the client + // This is a bit late to be doing this check - we have already read in the total request. if ((callSize + callQueueSize.get()) > maxQueueSize) { - final Call callTooBig = new Call(id, null, this, responder, callSize, - null); + final Call callTooBig = new Call(id, null, null, this, responder, callSize, null); ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream(); - setupResponse(responseBuffer, callTooBig, Status.FATAL, - IOException.class.getName(), - "Call queue is full, is ipc.server.max.callqueue.size too small?"); + setupResponse(responseBuffer, callTooBig, new CallQueueTooBigException(), + "Call queue is full, is ipc.server.max.callqueue.size too small?"); responder.doRespond(callTooBig); return; } - - RpcRequestBody rpcRequestBody; + Method method = null; + Message param = null; try { - rpcRequestBody = RpcRequestBody.parseDelimitedFrom(dis); + if (header.hasRequestParamMeta()) { + method = methodCache.getMethod(this.protocol, header.getMethodName()); + Message m = methodCache.getMethodArgType(method); + Builder builder = m.newBuilderForType(); + cis = CodedInputStream.newInstance(buf, offset, buf.length - offset); + int paramSize = cis.readRawVarint32(); + offset += cis.getTotalBytesRead(); + builder.mergeFrom(buf, offset, paramSize); + param = builder.build(); + } } catch (Throwable t) { - LOG.warn("Unable to read call parameters for client " + - getHostAddress(), t); - final Call readParamsFailedCall = new Call(id, null, this, responder, - callSize, null); + String msg = "Unable to read call parameter from client " + getHostAddress(); + LOG.warn(msg, t); + final Call readParamsFailedCall = + new Call(id, null, null, this, responder, callSize, null); ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream(); - - setupResponse(responseBuffer, readParamsFailedCall, Status.FATAL, - t.getClass().getName(), - "IPC server unable to read call parameters: " + t.getMessage()); + setupResponse(responseBuffer, readParamsFailedCall, t, + msg + "; " + t.getMessage()); responder.doRespond(readParamsFailedCall); return; } - Call call; - if (request.hasTinfo()) { - call = new Call(id, rpcRequestBody, this, responder, callSize, - new TraceInfo(request.getTinfo().getTraceId(), request.getTinfo() - .getParentId())); - } else { - call = new Call(id, rpcRequestBody, this, responder, callSize, null); - } - + Call call = (header.hasTraceInfo())?
+ new Call(id, method, param, this, responder, callSize, + new TraceInfo(header.getTraceInfo().getTraceId(), header.getTraceInfo().getParentId())): + new Call(id, method, param, this, responder, callSize, null); callQueueSize.add(callSize); - - if (priorityCallQueue != null && getQosLevel(rpcRequestBody) > highPriorityLevel) { + Pair headerAndParam = new Pair(header, param); + if (priorityCallQueue != null && getQosLevel(headerAndParam) > highPriorityLevel) { priorityCallQueue.put(call); - } else if (replicationQueue != null - && getQosLevel(rpcRequestBody) == HConstants.REPLICATION_QOS) { + } else if (replicationQueue != null && + getQosLevel(headerAndParam) == HConstants.REPLICATION_QOS) { replicationQueue.put(call); } else { callQueue.put(call); // queue the call; maybe blocked here @@ -1662,16 +1702,15 @@ public abstract class HBaseServer implements RpcServer { && (authMethod != AuthMethod.DIGEST)) { ProxyUsers.authorize(user, this.getHostAddress(), conf); } - authorize(user, header, getHostInetAddress()); + authorize(user, connectionHeader, getHostInetAddress()); if (LOG.isDebugEnabled()) { - LOG.debug("Successfully authorized " + header); + LOG.debug("Successfully authorized " + TextFormat.shortDebugString(connectionHeader)); } metrics.authorizationSuccess(); } catch (AuthorizationException ae) { LOG.debug("Connection authorization failed: "+ae.getMessage(), ae); metrics.authorizationFailure(); - setupResponse(authFailedResponse, authFailedCall, Status.FATAL, - ae.getClass().getName(), ae.getMessage()); + setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage()); responder.doRespond(authFailedCall); return false; } @@ -1741,53 +1780,41 @@ public abstract class HBaseServer implements RpcServer { @Override public void run() { - LOG.info(getName() + ": starting"); + LOG.info("Starting"); status.setStatus("starting"); SERVER.set(HBaseServer.this); while (running) { - try { status.pause("Waiting for a call"); Call call = myCallQueue.take(); // pop the queue; maybe blocked here status.setStatus("Setting up call"); - status.setConnection(call.connection.getHostAddress(), - call.connection.getRemotePort()); - - if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": has #" + call.id + " from " + - call.connection); - - String errorClass = null; + status.setConnection(call.connection.getHostAddress(), call.connection.getRemotePort()); + if (LOG.isDebugEnabled()) { + UserGroupInformation remoteUser = call.connection.user; + LOG.debug(call.toString() + + " executing as " + ((remoteUser == null)? "NULL principal": remoteUser.getUserName())); + } + Throwable errorThrowable = null; String error = null; Message value = null; - CurCall.set(call); Span currentRequestSpan = NullSpan.getInstance(); try { - if (!started) + if (!started) { throw new ServerNotRunningYetException("Server is not running yet"); - + } if (call.tinfo != null) { currentRequestSpan = Trace.startSpan( "handling " + call.toString(), call.tinfo, Sampler.ALWAYS); } - - if (LOG.isDebugEnabled()) { - UserGroupInformation remoteUser = call.connection.user; - LOG.debug(getName() + ": call #" + call.id + " executing as " - + (remoteUser == null ? 
"NULL principal" : - remoteUser.getUserName())); - } - RequestContext.set(User.create(call.connection.user), getRemoteIp(), - call.connection.protocol); + call.connection.protocol); // make the call - value = call(call.connection.protocol, call.rpcRequestBody, call.timestamp, - status); + value = call(call.connection.protocol, call.method, call.param, call.timestamp, status); } catch (Throwable e) { - LOG.debug(getName()+", call "+call+": error: " + e, e); - errorClass = e.getClass().getName(); + LOG.debug(call.toString() + " error: " + e, e); + errorThrowable = e; error = StringUtils.stringifyException(e); } finally { currentRequestSpan.stop(); @@ -1800,21 +1827,18 @@ public abstract class HBaseServer implements RpcServer { // Set the response for undelayed calls and delayed calls with // undelayed responses. if (!call.isDelayed() || !call.isReturnValueDelayed()) { - call.setResponse(value, - errorClass == null? Status.SUCCESS: Status.ERROR, - errorClass, error); + call.setResponse(value, errorThrowable, error); } call.sendResponseIfReady(); status.markComplete("Sent response"); } catch (InterruptedException e) { if (running) { // unexpected -- log it - LOG.info(getName() + " caught: " + - StringUtils.stringifyException(e)); + LOG.info("Caught: " + StringUtils.stringifyException(e)); } } catch (OutOfMemoryError e) { if (errorHandler != null) { if (errorHandler.checkOOME(e)) { - LOG.info(getName() + ": exiting on OOME"); + LOG.info("Exiting on OOME"); return; } } else { @@ -1822,44 +1846,16 @@ public abstract class HBaseServer implements RpcServer { throw e; } } catch (ClosedChannelException cce) { - LOG.warn(getName() + " caught a ClosedChannelException, " + + LOG.warn("Caught a ClosedChannelException, " + "this means that the server was processing a " + "request but the client went away. The error message was: " + cce.getMessage()); } catch (Exception e) { - LOG.warn(getName() + " caught: " + - StringUtils.stringifyException(e)); + LOG.warn("Caught: " + StringUtils.stringifyException(e)); } } - LOG.info(getName() + ": exiting"); - } - - } - - - private Function qosFunction = null; - - /** - * Gets the QOS level for this call. If it is higher than the highPriorityLevel and there - * are priorityHandlers available it will be processed in it's own thread set. - * - * @param newFunc - */ - @Override - public void setQosFunction(Function newFunc) { - qosFunction = newFunc; - } - - protected int getQosLevel(RpcRequestBody rpcRequestBody) { - if (qosFunction == null) { - return 0; - } - - Integer res = qosFunction.apply(rpcRequestBody); - if (res == null) { - return 0; + LOG.info("Exiting"); } - return res; } /* Constructs a server listening on the named port and address. 
Parameters passed must @@ -1945,12 +1941,10 @@ public abstract class HBaseServer implements RpcServer { * @param error error message, if the call failed * @throws IOException */ - private void setupResponse(ByteArrayOutputStream response, - Call call, Status status, - String errorClass, String error) + private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error) throws IOException { - response.reset(); - call.setResponse(null, status, errorClass, error); + if (response != null) response.reset(); + call.setResponse(null, t, error); } protected void closeConnection(Connection connection) { @@ -2090,6 +2084,7 @@ public abstract class HBaseServer implements RpcServer { * @param addr InetAddress of incoming connection * @throws org.apache.hadoop.security.authorize.AuthorizationException when the client isn't authorized to talk the protocol */ + @SuppressWarnings("static-access") public void authorize(UserGroupInformation user, ConnectionHeader connection, InetAddress addr diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java index 92bb807..9ecc1ee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java @@ -27,13 +27,10 @@ import java.net.InetSocketAddress; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import javax.net.SocketFactory; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.IpcProtocol; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.ipc.RemoteException; @@ -83,11 +80,15 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { this.rpcTimeout = rpcTimeout; } - private RpcRequestBody constructRpcRequest(Method method, - Object[] params) throws ServiceException { - RpcRequestBody rpcRequest; - RpcRequestBody.Builder builder = RpcRequestBody.newBuilder(); - builder.setMethodName(method.getName()); + /** + * Will go away. Currently kept while working on rpc specification. + * @param methodName + * @param params + * @return + * @throws ServiceException + */ + private Message getParam(final String methodName, final Object[] params) + throws ServiceException { Message param; int length = params.length; if (length == 2) { @@ -98,13 +99,10 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { param = (Message)params[0]; } else { throw new ServiceException("Too many parameters for request. 
Method: [" - + method.getName() + "]" + ", Expected: 2, Actual: " + + methodName + "]" + ", Expected: 2, Actual: " + params.length); } - builder.setRequestClassName(param.getClass().getName()); - builder.setRequest(param.toByteString()); - rpcRequest = builder.build(); - return rpcRequest; + return param; } /** @@ -133,10 +131,10 @@ public class ProtobufRpcClientEngine implements RpcClientEngine { startTime = System.currentTimeMillis(); } - RpcRequestBody rpcRequest = constructRpcRequest(method, args); + Message param = getParam(method.getName(), args); Message val = null; try { - val = client.call(rpcRequest, address, protocol, ticket, rpcTimeout); + val = client.call(method, param, address, protocol, ticket, rpcTimeout); if (LOG.isDebugEnabled()) { long callTime = System.currentTimeMillis() - startTime; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java index cf68497..9cc74d6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java @@ -23,7 +23,6 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.security.HBasePolicyProvider; import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager; @@ -44,6 +42,7 @@ import org.codehaus.jackson.map.ObjectMapper; import com.google.protobuf.Message; import com.google.protobuf.ServiceException; +import com.google.protobuf.TextFormat; /** * The {@link RpcServerEngine} implementation for ProtoBuf-based RPCs. */ @@ -65,7 +64,6 @@ class ProtobufRpcServerEngine implements RpcServerEngine { metaHandlerCount, verbose, highPriorityLevel); } - public static class Server extends HBaseServer { boolean verbose; Object instance; @@ -110,10 +108,6 @@ class ProtobufRpcServerEngine implements RpcServerEngine { this.instance = instance; this.implementation = instance.getClass(); } - private static final Map methodArg = - new ConcurrentHashMap(); - private static final Map methodInstances = - new ConcurrentHashMap(); private AuthenticationTokenSecretManager createSecretManager(){ if (!isSecurityEnabled || @@ -151,37 +145,20 @@ class ProtobufRpcServerEngine implements RpcServerEngine { * the return response has protobuf response payload. On failure, the * exception name and the stack trace are returned in the protobuf response. 
*/ - public Message call(Class protocol, - RpcRequestBody rpcRequest, long receiveTime, MonitoredRPCHandler status) + public Message call(Class protocol, Method method, + Message param, long receiveTime, MonitoredRPCHandler status) throws IOException { try { - String methodName = rpcRequest.getMethodName(); - Method method = getMethod(protocol, methodName); - if (method == null) { - throw new UnknownProtocolException("Method " + methodName + - " doesn't exist in protocol " + protocol.getName()); - } - - /** - * RPCs for a particular interface (ie protocol) are done using a - * IPC connection that is setup using rpcProxy. - * The rpcProxy's has a declared protocol name that is - * sent form client to server at connection time. - */ - if (verbose) { - LOG.info("Call: protocol name=" + protocol.getName() + - ", method=" + methodName); + LOG.info("callId: " + CurCall.get().id + " protocol: " + protocol.getName() + + " method: " + method.getName()); } - status.setRPC(rpcRequest.getMethodName(), - new Object[]{rpcRequest.getRequest()}, receiveTime); - status.setRPCPacket(rpcRequest); + status.setRPC(method.getName(), new Object[]{param}, receiveTime); + // TODO: Review after we add in encoded data blocks. + status.setRPCPacket(param); status.resume("Servicing call"); //get an instance of the method arg type - Message protoType = getMethodArgType(method); - Message param = protoType.newBuilderForType() - .mergeFrom(rpcRequest.getRequest()).build(); Message result; Object impl = null; if (protocol.isAssignableFrom(this.implementation)) { @@ -189,7 +166,6 @@ class ProtobufRpcServerEngine implements RpcServerEngine { } else { throw new UnknownProtocolException(protocol); } - long startTime = System.currentTimeMillis(); if (method.getParameterTypes().length == 2) { // RpcController + Message in the method args @@ -206,18 +182,16 @@ class ProtobufRpcServerEngine implements RpcServerEngine { int processingTime = (int) (System.currentTimeMillis() - startTime); int qTime = (int) (startTime-receiveTime); if (TRACELOG.isDebugEnabled()) { - TRACELOG.debug("Call #" + CurCall.get().id + - "; served=" + protocol.getSimpleName() + "#" + method.getName() + - ", queueTime=" + qTime + - ", processingTime=" + processingTime + - ", request=" + param.toString() + - " response=" + result.toString()); + TRACELOG.debug(CurCall.get().toString() + + " response: " + TextFormat.shortDebugString(result) + + " served: " + protocol.getSimpleName() + + " queueTime: " + qTime + + " processingTime: " + processingTime); } metrics.dequeuedCall(qTime); metrics.processedCall(processingTime); - if (verbose) { - log("Return: "+result, LOG); + log("Return " + TextFormat.shortDebugString(result), LOG); } long responseSize = result.getSerializedSize(); // log any RPC responses that are slower than the configured warn @@ -230,12 +204,12 @@ class ProtobufRpcServerEngine implements RpcServerEngine { // when tagging, we let TooLarge trump TooSmall to keep output simple // note that large responses will often also be slow. StringBuilder buffer = new StringBuilder(256); - buffer.append(methodName); + buffer.append(method.getName()); buffer.append("("); buffer.append(param.getClass().getName()); buffer.append(")"); - logResponse(new Object[]{rpcRequest.getRequest()}, - methodName, buffer.toString(), (tooLarge ? "TooLarge" : "TooSlow"), + logResponse(new Object[]{param}, + method.getName(), buffer.toString(), (tooLarge ? 
"TooLarge" : "TooSlow"), status.getClient(), startTime, processingTime, qTime, responseSize); } @@ -261,48 +235,6 @@ class ProtobufRpcServerEngine implements RpcServerEngine { } } - static Method getMethod(Class protocol, - String methodName) { - Method method = methodInstances.get(methodName); - if (method != null) { - return method; - } - Method[] methods = protocol.getMethods(); - for (Method m : methods) { - if (m.getName().equals(methodName)) { - m.setAccessible(true); - methodInstances.put(methodName, m); - return m; - } - } - return null; - } - - static Message getMethodArgType(Method method) throws Exception { - Message protoType = methodArg.get(method.getName()); - if (protoType != null) { - return protoType; - } - - Class[] args = method.getParameterTypes(); - Class arg; - if (args.length == 2) { - // RpcController + Message in the method args - // (generated code from RPC bits in .proto files have RpcController) - arg = args[1]; - } else if (args.length == 1) { - arg = args[0]; - } else { - //unexpected - return null; - } - //in the protobuf methods, args[1] is the only significant argument - Method newInstMethod = arg.getMethod("getDefaultInstance"); - newInstMethod.setAccessible(true); - protoType = (Message) newInstMethod.invoke(null, (Object[]) null); - methodArg.put(method.getName(), protoType); - return protoType; - } /** * Logs an RPC response to the LOG file, producing valid JSON objects for * client Operations. @@ -360,10 +292,12 @@ class ProtobufRpcServerEngine implements RpcServerEngine { mapper.writeValueAsString(responseInfo)); } } + protected static void log(String value, Log LOG) { String v = value; - if (v != null && v.length() > 55) - v = v.substring(0, 55)+"..."; + final int max = 100; + if (v != null && v.length() > max) + v = v.substring(0, max) + "..."; LOG.info(v); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 6e244bd..16c942a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -25,9 +25,11 @@ import com.google.protobuf.Message; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; +import org.apache.hadoop.hbase.util.Pair; import java.io.IOException; +import java.lang.reflect.Method; import java.net.InetSocketAddress; @InterfaceAudience.Private @@ -47,19 +49,19 @@ public interface RpcServer { InetSocketAddress getListenerAddress(); /** Called for each call. + * @param method Method to invoke. 
* @param param parameter * @param receiveTime time + * @param status * @return Message Protobuf response Message * @throws java.io.IOException e */ - Message call(Class protocol, - RpcRequestBody param, long receiveTime, MonitoredRPCHandler status) - throws IOException; + Message call(Class protocol, Method method, + Message param, long receiveTime, MonitoredRPCHandler status) + throws IOException; void setErrorHandler(HBaseRPCErrorHandler handler); - void setQosFunction(Function<RpcRequestBody, Integer> newFunc); - void openServer(); void startThreads(); @@ -68,4 +70,6 @@ public interface RpcServer { * Returns the metrics instance for reporting RPC call statistics */ MetricsHBaseServer getMetrics(); + + public void setQosFunction(Function<Pair<RequestHeader, Message>, Integer> newFunc); } \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java index 795b88c..c5fa7b8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java @@ -19,7 +19,8 @@ package org.apache.hadoop.hbase.monitoring; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; + +import com.google.protobuf.Message; /** * A MonitoredTask implementation optimized for use with RPC Handlers @@ -30,6 +31,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; public interface MonitoredRPCHandler extends MonitoredTask { public abstract String getRPC(); public abstract String getRPC(boolean withParams); + // TODO: Make sure length is good in here when we add encoded data blocks.
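The TODO above concerns the accessor that follows. A minimal sketch, assuming the implementation holds the request in a Message field named packet (as MonitoredRPCHandlerImpl does later in this patch), of how the length can be derived now that the packet is a generic protobuf:

  public long getRPCPacketLength() {
    if (!isRPCRunning() || packet == null) {
      return -1L; // no call in flight, or no packet recorded for it
    }
    // Message.getSerializedSize() memoizes its result, so repeated
    // status polls do not re-walk the message.
    return packet.getSerializedSize();
  }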
public abstract long getRPCPacketLength(); public abstract String getClient(); public abstract long getRPCStartTime(); @@ -37,8 +39,7 @@ public interface MonitoredRPCHandler extends MonitoredTask { public abstract boolean isRPCRunning(); public abstract boolean isOperationRunning(); - public abstract void setRPC(String methodName, Object [] params, - long queueTime); - public abstract void setRPCPacket(RpcRequestBody param); + public abstract void setRPC(String methodName, Object [] params, long queueTime); + public abstract void setRPCPacket(Message param); public abstract void setConnection(String clientAddress, int remotePort); -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java index bb5b928..cb98034 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java @@ -18,19 +18,15 @@ */ package org.apache.hadoop.hbase.monitoring; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Operation; -import org.apache.hadoop.hbase.io.WritableWithSize; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.Writable; -import org.codehaus.jackson.map.ObjectMapper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; +import com.google.protobuf.Message; /** * A MonitoredTask implementation designed for use with RPC Handlers @@ -46,7 +42,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl private long rpcStartTime; private String methodName = ""; private Object [] params = {}; - private RpcRequestBody packet; + private Message packet; public MonitoredRPCHandlerImpl() { super(); @@ -201,7 +197,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl * that it can later compute its size if asked for it. 
* @param param The protobuf received by the RPC for this call */ - public void setRPCPacket(RpcRequestBody param) { + public void setRPCPacket(Message param) { this.packet = param; } @@ -257,4 +253,4 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl } return super.toString() + ", rpcMethod=" + getRPC(); } -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java index 9a19cb2..c8cbb72 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java @@ -176,5 +176,4 @@ class MonitoredTaskImpl implements MonitoredTask { sb.append(getCompletionTimestamp()); return sb.toString(); } - -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index c8ea3bd..5175178 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -25,11 +25,9 @@ import java.lang.annotation.RetentionPolicy; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.lang.reflect.Constructor; -import java.lang.reflect.Method; import java.net.BindException; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; @@ -53,7 +51,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.management.ObjectName; -import com.google.protobuf.Message; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -66,9 +63,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.FailedSanityCheckException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HealthCheckChore; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.HealthCheckChore; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.OutOfOrderScannerNextException; @@ -166,7 +163,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest; @@ -212,10 +208,11 @@ import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.KeeperException; import org.cliffc.high_scale_lib.Counter; -import com.google.common.base.Function; 
import com.google.protobuf.ByteString; +import com.google.protobuf.Message; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; +import com.google.protobuf.TextFormat; /** * HRegionServer makes a set of HRegions available to clients. It checks in with @@ -499,7 +496,7 @@ public class HRegionServer implements ClientProtocol, this.isa = this.rpcServer.getListenerAddress(); this.rpcServer.setErrorHandler(this); - this.rpcServer.setQosFunction((qosFunction = new QosFunction())); + this.rpcServer.setQosFunction((qosFunction = new QosFunction(this))); this.startcode = System.currentTimeMillis(); // login the zookeeper client principal (if using security) @@ -559,152 +556,6 @@ public class HRegionServer implements ClientProtocol, } /** - * Utility used to ensure higher quality of service for priority rpcs; e.g. - * rpcs to .META. and -ROOT-, etc. - */ - class QosFunction implements Function<RpcRequestBody, Integer> { - private final Map<String, Integer> annotatedQos; - //We need to mock the regionserver instance for some unit tests (set via - //the setRegionServer method). - //The field value is initially set to the enclosing instance of HRegionServer. - private HRegionServer hRegionServer = HRegionServer.this; - - //The logic for figuring out high priority RPCs is as follows: - //1. if the method is annotated with a QosPriority of QOS_HIGH, - // that is honored - //2. parse out the protobuf message and see if the request is for meta - // region, and if so, treat it as a high priority RPC - //Some optimizations for (2) are done here - - //Clients send the argument classname as part of making the RPC. The server - //decides whether to deserialize the proto argument message based on the - //pre-established set of argument classes (knownArgumentClasses below). - //This prevents the server from having to deserialize all proto argument - //messages prematurely. - //All the argument classes declare a 'getRegion' method that returns a - //RegionSpecifier object. Methods can be invoked on the returned object - //to figure out whether it is a meta region or not.
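The QosFunction being deleted here is superseded by a standalone class constructed with the region server (see new QosFunction(this) above). That class is outside this diff, so what follows is only a hedged sketch of its likely shape, inferred from the new setQosFunction signature and the reworked TestPriorityRpc; scanQosAnnotations is a hypothetical helper:

  class QosFunction implements Function<Pair<RequestHeader, Message>, Integer> {
    private final Map<String, Integer> annotatedQos; // from @QosPriority annotations
    private HRegionServer hRegionServer;

    QosFunction(final HRegionServer hrs) {
      this.hRegionServer = hrs;
      this.annotatedQos = scanQosAnnotations(hrs); // hypothetical helper
    }

    void setRegionServer(final HRegionServer hrs) { // retained for test mocking
      this.hRegionServer = hrs;
    }

    @Override
    public Integer apply(Pair<RequestHeader, Message> headerAndParam) {
      Integer byAnnotation = annotatedQos.get(headerAndParam.getFirst().getMethodName());
      if (byAnnotation != null) return byAnnotation;
      Message param = headerAndParam.getSecond();
      if (param == null) return HConstants.NORMAL_QOS;
      // The param now arrives already deserialized, so the parseFrom
      // reflection cached in the deleted code below is unnecessary; only
      // the meta-region and scanner checks need to carry over.
      return HConstants.NORMAL_QOS; // ...after those checks, as in apply() below
    }
  }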
- @SuppressWarnings("unchecked") - private final Class<? extends Message>[] knownArgumentClasses = new Class[]{ - GetRegionInfoRequest.class, - GetStoreFileRequest.class, - CloseRegionRequest.class, - FlushRegionRequest.class, - SplitRegionRequest.class, - CompactRegionRequest.class, - GetRequest.class, - MutateRequest.class, - ScanRequest.class, - MultiRequest.class - }; - - //Some caches for helping performance - private final Map<String, Class<? extends Message>> argumentToClassMap = - new HashMap<String, Class<? extends Message>>(); - private final Map<String, Map<Class<? extends Message>, Method>> - methodMap = new HashMap<String, Map<Class<? extends Message>, Method>>(); - - public QosFunction() { - Map<String, Integer> qosMap = new HashMap<String, Integer>(); - for (Method m : HRegionServer.class.getMethods()) { - QosPriority p = m.getAnnotation(QosPriority.class); - if (p != null) { - qosMap.put(m.getName(), p.priority()); - } - } - - annotatedQos = qosMap; - if (methodMap.get("parseFrom") == null) { - methodMap.put("parseFrom", - new HashMap<Class<? extends Message>, Method>()); - } - if (methodMap.get("getRegion") == null) { - methodMap.put("getRegion", - new HashMap<Class<? extends Message>, Method>()); - } - for (Class<? extends Message> cls : knownArgumentClasses) { - argumentToClassMap.put(cls.getCanonicalName(), cls); - try { - methodMap.get("parseFrom").put(cls, - cls.getDeclaredMethod("parseFrom",ByteString.class)); - methodMap.get("getRegion").put(cls, cls.getDeclaredMethod("getRegion")); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - } - - void setRegionServer(HRegionServer server) { - this.hRegionServer = server; - } - - public boolean isMetaRegion(byte[] regionName) { - HRegion region; - try { - region = hRegionServer.getRegion(regionName); - } catch (NotServingRegionException ignored) { - return false; - } - return region.getRegionInfo().isMetaRegion(); - } - - @Override - public Integer apply(RpcRequestBody from) { - String methodName = from.getMethodName(); - Class<? extends Message> rpcArgClass = null; - if (from.hasRequestClassName()) { - String cls = from.getRequestClassName(); - rpcArgClass = argumentToClassMap.get(cls); - } - - Integer priorityByAnnotation = annotatedQos.get(methodName); - if (priorityByAnnotation != null) { - return priorityByAnnotation; - } - - if (rpcArgClass == null || from.getRequest().isEmpty()) { - return HConstants.NORMAL_QOS; - } - Object deserializedRequestObj; - //check whether the request has reference to Meta region - try { - Method parseFrom = methodMap.get("parseFrom").get(rpcArgClass); - deserializedRequestObj = parseFrom.invoke(null, from.getRequest()); - Method getRegion = methodMap.get("getRegion").get(rpcArgClass); - RegionSpecifier regionSpecifier = - (RegionSpecifier)getRegion.invoke(deserializedRequestObj, - (Object[])null); - HRegion region = hRegionServer.getRegion(regionSpecifier); - if (region.getRegionInfo().isMetaTable()) { - if (LOG.isDebugEnabled()) { - LOG.debug("High priority: " + from.toString()); - } - return HConstants.HIGH_QOS; - } - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - if (methodName.equals("scan")) { // scanner methods... - ScanRequest request = (ScanRequest)deserializedRequestObj; - if (!request.hasScannerId()) { - return HConstants.NORMAL_QOS; - } - RegionScanner scanner = hRegionServer.getScanner(request.getScannerId()); - if (scanner != null && scanner.getRegionInfo().isMetaTable()) { - if (LOG.isDebugEnabled()) { - LOG.debug("High priority scanner request: " + request.getScannerId()); - } - return HConstants.HIGH_QOS; - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("Low priority: " + from.toString()); - } - return HConstants.NORMAL_QOS; - } - } - - /** - * All initialization needed before we go register with Master.
* * @throws IOException @@ -1131,8 +982,8 @@ public class HRegionServer implements ClientProtocol, private void closeWAL(final boolean delete) { try { if (this.hlogForMeta != null) { - //All hlogs (meta and non-meta) are in the same directory. Don't call - //closeAndDelete here since that would delete all hlogs not just the + //All hlogs (meta and non-meta) are in the same directory. Don't call + //closeAndDelete here since that would delete all hlogs not just the //meta ones. We will just 'close' the hlog for meta here, and leave //the directory cleanup to the follow-on closeAndDelete call. this.hlogForMeta.close(); @@ -1415,8 +1266,8 @@ public class HRegionServer implements ClientProtocol, Path logdir = new Path(rootDir, logName); if (LOG.isDebugEnabled()) LOG.debug("logdir=" + logdir); - this.hlogForMeta = HLogFactory.createMetaHLog(this.fs.getBackingFs(), - rootDir, logName, this.conf, getMetaWALActionListeners(), + this.hlogForMeta = HLogFactory.createMetaHLog(this.fs.getBackingFs(), + rootDir, logName, this.conf, getMetaWALActionListeners(), this.serverNameFromMasterPOV.toString()); } return this.hlogForMeta; @@ -1518,7 +1369,7 @@ public class HRegionServer implements ClientProtocol, ".compactionChecker", uncaughtExceptionHandler); if (this.healthCheckChore != null) { Threads - .setDaemonThreadRunning(this.healthCheckChore.getThread(), n + ".healthChecker", + .setDaemonThreadRunning(this.healthCheckChore.getThread(), n + ".healthChecker", uncaughtExceptionHandler); } @@ -1612,17 +1463,17 @@ public class HRegionServer implements ClientProtocol, return getWAL(null); } catch (IOException e) { LOG.warn("getWAL threw exception " + e); - return null; + return null; } } @Override public HLog getWAL(HRegionInfo regionInfo) throws IOException { //TODO: at some point this should delegate to the HLogFactory - //currently, we don't care about the region as much as we care about the + //currently, we don't care about the region as much as we care about the //table.. (hence checking the tablename below) - //_ROOT_ and .META. regions have separate WAL. - if (regionInfo != null && + //_ROOT_ and .META. regions have separate WAL. + if (regionInfo != null && regionInfo.isMetaTable()) { return getMetaWAL(); } @@ -1727,15 +1578,15 @@ public class HRegionServer implements ClientProtocol, if (cause != null) { msg += "\nCause:\n" + StringUtils.stringifyException(cause); } - if (hbaseMaster != null) { + // Report to the master but only if we have already registered with the master. + if (hbaseMaster != null && this.serverNameFromMasterPOV != null) { ReportRSFatalErrorRequest.Builder builder = ReportRSFatalErrorRequest.newBuilder(); ServerName sn = ServerName.parseVersionedServerName(this.serverNameFromMasterPOV.getVersionedBytes()); builder.setServer(ProtobufUtil.toServerName(sn)); builder.setErrorMessage(msg); - hbaseMaster.reportRSFatalError( - null,builder.build()); + hbaseMaster.reportRSFatalError(null, builder.build()); } } catch (Throwable t) { LOG.warn("Unable to report fatal error to master", t); @@ -2992,7 +2843,8 @@ public class HRegionServer implements ClientProtocol, if (rsh != null) { if (request.getNextCallSeq() != rsh.nextCallSeq) { throw new OutOfOrderScannerNextException("Expected nextCallSeq: " + rsh.nextCallSeq - + " But the nextCallSeq got from client: " + request.getNextCallSeq()); + + " But the nextCallSeq got from client: " + request.getNextCallSeq() + + "; request=" + TextFormat.shortDebugString(request)); } // Increment the nextCallSeq value which is the next expected from client. 
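// An aside, not part of the patch: TextFormat.shortDebugString, added to the
// exception message above, renders a protobuf on a single line, unlike
// Message.toString(), which emits one field per line. For example:
//
//   ScanRequest req = ScanRequest.newBuilder()
//       .setScannerId(12345).setNextCallSeq(2).build();
//   req.toString();                    // "scanner_id: 12345\nnext_call_seq: 2\n"
//   TextFormat.shortDebugString(req);  // "scanner_id: 12345 next_call_seq: 2"
//
// That keeps the OutOfOrderScannerNextException text greppable in server logs.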
rsh.nextCallSeq++; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java index 09a26ff..e9fb938 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java @@ -151,7 +151,7 @@ public class HBaseSaslRpcServer { private static final int FIRST_CODE = values()[0].code; /** Return the object represented by the code. */ - private static AuthMethod valueOf(byte code) { + public static AuthMethod valueOf(byte code) { final int i = (code & 0xff) - FIRST_CODE; return i < 0 || i >= values().length ? null : values()[i]; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java index f2a5fb3..1510770 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java @@ -33,6 +33,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.ScannerCallable; +import org.apache.hadoop.hbase.ipc.HBaseClient; +import org.apache.hadoop.hbase.ipc.HBaseServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -40,6 +43,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.apache.commons.logging.impl.Log4JLogger; +import org.apache.log4j.Level; /** * Test {@link MetaReader}, {@link MetaEditor}, and {@link RootLocationEditor}. 
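The HBaseSaslRpcServer hunk above widens AuthMethod.valueOf from private to public, so callers outside the class can map a wire byte back to its enum constant. A minimal usage sketch; the DataInput read is hypothetical:

  byte code = in.readByte(); // first byte of the SASL auth preamble
  AuthMethod authMethod = AuthMethod.valueOf(code);
  if (authMethod == null) { // valueOf returns null for codes it does not know
    throw new IOException("Unknown auth method code " + code);
  }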
@@ -63,10 +68,12 @@ public class TestMetaReaderEditor { public boolean isAborted() { return abort.get(); } - }; @BeforeClass public static void beforeClass() throws Exception { + ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL); + ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL); + ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL); UTIL.startMiniCluster(3); Configuration c = new Configuration(UTIL.getConfiguration()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java index f54350d..aaa8e10 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java @@ -25,6 +25,7 @@ import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import java.io.IOException; +import java.lang.reflect.Method; import java.net.InetSocketAddress; import java.net.Socket; @@ -37,7 +38,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.IpcProtocol; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.StringUtils; @@ -59,8 +59,8 @@ public class TestIPC { } @Override - public Message call(Class protocol, - RpcRequestBody param, long receiveTime, MonitoredRPCHandler status) + public Message call(Class protocol, Method method, + Message param, long receiveTime, MonitoredRPCHandler status) throws IOException { return param; } @@ -82,13 +82,10 @@ public class TestIPC { TestRpcServer rpcServer = new TestRpcServer(); rpcServer.start(); - HBaseClient client = new HBaseClient( - conf, - spyFactory); + HBaseClient client = new HBaseClient(conf, spyFactory); InetSocketAddress address = rpcServer.getListenerAddress(); - try { - client.call(RpcRequestBody.getDefaultInstance(), address, User.getCurrent(), 0); + client.call(null, null, address, null, User.getCurrent(), 0); fail("Expected an exception to have been thrown!"); } catch (Exception e) { LOG.info("Caught expected exception: " + e.toString()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java index c2866e5..26b0c92 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponsePro import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto; import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto; import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.Test; import org.junit.Before; @@ -82,7 +84,10 @@ public class TestProtoBufRpc { @Before public void setUp() throws IOException { // Setup server for both protocols conf = new Configuration(); - + Logger log = Logger.getLogger("org.apache.hadoop.ipc.HBaseServer"); + log.setLevel(Level.DEBUG); + log = Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace"); + 
log.setLevel(Level.TRACE); // Create server side implementation PBServerImpl serverImpl = new PBServerImpl(); // Get RPC server for server side implementation @@ -92,8 +97,7 @@ public class TestProtoBufRpc { addr = server.getListenerAddress(); server.start(); } - - + @After public void tearDown() throws Exception { server.stop(); @@ -103,14 +107,13 @@ public class TestProtoBufRpc { public void testProtoBufRpc() throws Exception { ProtobufRpcClientEngine clientEngine = new ProtobufRpcClientEngine(conf); try { - TestRpcService client = clientEngine.getProxy(TestRpcService.class, addr, conf, 10000); + TestRpcService client = clientEngine.getProxy(TestRpcService.class, addr, conf, 100000); // Test ping method EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build(); client.ping(null, emptyRequest); // Test echo method - EchoRequestProto echoRequest = EchoRequestProto.newBuilder() - .setMessage("hello").build(); + EchoRequestProto echoRequest = EchoRequestProto.newBuilder().setMessage("hello").build(); EchoResponseProto echoResponse = client.echo(null, echoRequest); Assert.assertEquals(echoResponse.getMessage(), "hello"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java index 8ba27bb..2eaeb3e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java @@ -28,58 +28,56 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.MediumTests; -import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody; -import org.apache.hadoop.hbase.regionserver.HRegionServer.QosFunction; -import org.junit.BeforeClass; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; +import org.apache.hadoop.hbase.util.Pair; +import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.protobuf.ByteString; +import com.google.protobuf.Message; /** * Tests that verify certain RPCs get a higher QoS. 
*/ @Category(MediumTests.class) public class TestPriorityRpc { - static HRegionServer regionServer = null; - static QosFunction qosFunction = null; - @BeforeClass - public static void onetimeSetup() { + private HRegionServer regionServer = null; + private QosFunction qosFunction = null; + + @Before + public void setup() { Configuration conf = HBaseConfiguration.create(); - regionServer = - HRegionServer.constructRegionServer(HRegionServer.class, conf); + regionServer = HRegionServer.constructRegionServer(HRegionServer.class, conf); qosFunction = regionServer.getQosFunction(); } + @Test public void testQosFunctionForMeta() throws IOException { qosFunction = regionServer.getQosFunction(); - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); + RequestHeader.Builder headerBuilder = RequestHeader.newBuilder(); //create an rpc request that has references to META region and also //uses one of the known argument classes (known argument classes are //listed in HRegionServer.QosFunction.knownArgumentClasses) - rpcRequestBuilder = RpcRequestBody.newBuilder(); - rpcRequestBuilder.setMethodName("foo"); + headerBuilder.setMethodName("foo"); GetRequest.Builder getRequestBuilder = GetRequest.newBuilder(); RegionSpecifier.Builder regionSpecifierBuilder = RegionSpecifier.newBuilder(); regionSpecifierBuilder.setType(RegionSpecifierType.REGION_NAME); - ByteString name = - ByteString.copyFrom(HRegionInfo.FIRST_META_REGIONINFO.getRegionName()); + ByteString name = ByteString.copyFrom(HRegionInfo.FIRST_META_REGIONINFO.getRegionName()); regionSpecifierBuilder.setValue(name); RegionSpecifier regionSpecifier = regionSpecifierBuilder.build(); getRequestBuilder.setRegion(regionSpecifier); Get.Builder getBuilder = Get.newBuilder(); getBuilder.setRow(ByteString.copyFrom("somerow".getBytes())); getRequestBuilder.setGet(getBuilder.build()); - rpcRequestBuilder.setRequest(getRequestBuilder.build().toByteString()); - rpcRequestBuilder.setRequestClassName(GetRequest.class.getCanonicalName()); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); + GetRequest getRequest = getRequestBuilder.build(); + RequestHeader header = headerBuilder.build(); HRegion mockRegion = Mockito.mock(HRegion.class); HRegionServer mockRS = Mockito.mock(HRegionServer.class); HRegionInfo mockRegionInfo = Mockito.mock(HRegionInfo.class); @@ -87,7 +85,8 @@ public class TestPriorityRpc { Mockito.when(mockRegion.getRegionInfo()).thenReturn(mockRegionInfo); Mockito.when(mockRegionInfo.isMetaTable()).thenReturn(true); qosFunction.setRegionServer(mockRS); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.HIGH_QOS); + assertTrue (qosFunction.apply(new Pair<RequestHeader, Message>(header, getRequest)) == + HConstants.HIGH_QOS); } @Test @@ -96,38 +95,39 @@ public class TestPriorityRpc { //known argument classes (it uses one random request class) //(known argument classes are listed in //HRegionServer.QosFunction.knownArgumentClasses) - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); - rpcRequestBuilder.setMethodName("foo"); - rpcRequestBuilder.setRequestClassName(GetOnlineRegionRequest.class.getCanonicalName()); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); + RequestHeader.Builder headerBuilder = RequestHeader.newBuilder(); + headerBuilder.setMethodName("foo"); + RequestHeader header = headerBuilder.build(); QosFunction qosFunc = regionServer.getQosFunction(); - assertTrue (qosFunc.apply(rpcRequest) == HConstants.NORMAL_QOS); + assertTrue (qosFunc.apply(new Pair<RequestHeader, Message>(header, null)) == + HConstants.NORMAL_QOS); } @Test
public void testQosFunctionForScanMethod() throws IOException { - RpcRequestBody.Builder rpcRequestBuilder = RpcRequestBody.newBuilder(); - rpcRequestBuilder.setMethodName("scan"); + RequestHeader.Builder headerBuilder = RequestHeader.newBuilder(); + headerBuilder.setMethodName("scan"); + RequestHeader header = headerBuilder.build(); //build an empty scan request ScanRequest.Builder scanBuilder = ScanRequest.newBuilder(); - ByteString requestBody = scanBuilder.build().toByteString(); - rpcRequestBuilder.setRequest(requestBody); - RpcRequestBody rpcRequest = rpcRequestBuilder.build(); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.NORMAL_QOS); + ScanRequest scanRequest = scanBuilder.build(); + HRegion mockRegion = Mockito.mock(HRegion.class); + HRegionServer mockRS = Mockito.mock(HRegionServer.class); + HRegionInfo mockRegionInfo = Mockito.mock(HRegionInfo.class); + Mockito.when(mockRS.getRegion((RegionSpecifier)Mockito.any())).thenReturn(mockRegion); + Mockito.when(mockRegion.getRegionInfo()).thenReturn(mockRegionInfo); + Mockito.when(mockRegionInfo.isMetaRegion()).thenReturn(false); + qosFunction.setRegionServer(mockRS); + int qos = qosFunction.apply(new Pair<RequestHeader, Message>(header, scanRequest)); + assertTrue ("" + qos, qos == HConstants.NORMAL_QOS); //build a scan request with scannerID scanBuilder = ScanRequest.newBuilder(); scanBuilder.setScannerId(12345); - requestBody = scanBuilder.build().toByteString(); - rpcRequestBuilder.setRequest(requestBody); - rpcRequestBuilder.setRequestClassName(ScanRequest.class.getCanonicalName()); - rpcRequest = rpcRequestBuilder.build(); + scanRequest = scanBuilder.build(); //mock out a high priority type handling and see the QoS returned - HRegionServer mockRS = Mockito.mock(HRegionServer.class); RegionScanner mockRegionScanner = Mockito.mock(RegionScanner.class); - HRegionInfo mockRegionInfo = Mockito.mock(HRegionInfo.class); - HRegion mockRegion = Mockito.mock(HRegion.class); Mockito.when(mockRS.getScanner(12345)).thenReturn(mockRegionScanner); Mockito.when(mockRegionScanner.getRegionInfo()).thenReturn(mockRegionInfo); Mockito.when(mockRS.getRegion((RegionSpecifier)Mockito.any())).thenReturn(mockRegion); @@ -136,11 +136,12 @@ public class TestPriorityRpc { qosFunction.setRegionServer(mockRS); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.HIGH_QOS); + assertTrue (qosFunction.apply(new Pair<RequestHeader, Message>(header, scanRequest)) == + HConstants.HIGH_QOS); //the same as above but with non-meta region Mockito.when(mockRegionInfo.isMetaTable()).thenReturn(false); - assertTrue (qosFunction.apply(rpcRequest) == HConstants.NORMAL_QOS); + assertTrue (qosFunction.apply(new Pair<RequestHeader, Message>(header, scanRequest)) == + HConstants.NORMAL_QOS); } - } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java index 9175bc2..2edd676 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.util; -import static org.junit.Assert.*; +import static org.junit.Assert.assertNotNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.regionserver.HRegion;
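Taken together, the TestPriorityRpc rewrite settles on one calling convention: QosFunction scores a pair of the request header and the already-parsed parameter, rather than re-parsing serialized bytes out of an RpcRequestBody. Condensed to its core, assuming mocks wired as in the tests above so that scanner 12345 resolves to a meta region:

  RequestHeader header = RequestHeader.newBuilder().setMethodName("scan").build();
  ScanRequest param = ScanRequest.newBuilder().setScannerId(12345).build();
  int qos = qosFunction.apply(new Pair<RequestHeader, Message>(header, param));
  assertTrue(qos == HConstants.HIGH_QOS);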